1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision etc. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
45 sets TREE_OVERFLOW.
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "tm.h"
55 #include "flags.h"
56 #include "tree.h"
57 #include "real.h"
58 #include "rtl.h"
59 #include "expr.h"
60 #include "tm_p.h"
61 #include "toplev.h"
62 #include "intl.h"
63 #include "ggc.h"
64 #include "hashtab.h"
65 #include "langhooks.h"
66 #include "md5.h"
68 /* Nonzero if we are folding constants inside an initializer; zero
69 otherwise. */
70 int folding_initializer = 0;
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
76 COMPCODE_FALSE = 0,
77 COMPCODE_LT = 1,
78 COMPCODE_EQ = 2,
79 COMPCODE_LE = 3,
80 COMPCODE_GT = 4,
81 COMPCODE_LTGT = 5,
82 COMPCODE_GE = 6,
83 COMPCODE_ORD = 7,
84 COMPCODE_UNORD = 8,
85 COMPCODE_UNLT = 9,
86 COMPCODE_UNEQ = 10,
87 COMPCODE_UNLE = 11,
88 COMPCODE_UNGT = 12,
89 COMPCODE_NE = 13,
90 COMPCODE_UNGE = 14,
91 COMPCODE_TRUE = 15
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
116 tree *, tree *);
117 static int all_ones_mask_p (tree, int);
118 static tree sign_bit_p (tree, tree);
119 static int simple_operand_p (tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 tree);
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
135 tree, tree,
136 tree, tree, int);
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static int native_encode_expr (tree, unsigned char *, int);
147 static tree native_interpret_expr (tree, unsigned char *, int);
150 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
151 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
152 and SUM1. Then this yields nonzero if overflow occurred during the
153 addition.
155 Overflow occurs if A and B have the same sign, but A and SUM differ in
156 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
157 sign. */
158 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
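/* A worked illustration (hypothetical 8-bit words, not actual
   HOST_WIDE_INTs): 100 + 60 wraps to -96.  100 and 60 agree in sign,
   so ~(100 ^ 60) has the sign bit set; 100 and -96 differ in sign, so
   (100 ^ -96) has it set as well.  Their AND is negative and the
   macro reports overflow.  For 100 + (-60) = 40 the addends differ in
   sign, ~(a ^ b) has a clear sign bit, and the macro yields 0.  */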
160 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
161 We do that by representing the two-word integer in 4 words, with only
162 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
163 number. The value of the word is LOWPART + HIGHPART * BASE. */
165 #define LOWPART(x) \
166 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
167 #define HIGHPART(x) \
168 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
169 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
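/* Illustration, assuming a host where HOST_BITS_PER_WIDE_INT == 32 so
   that BASE == 0x10000: LOWPART (0x12345678) == 0x5678,
   HIGHPART (0x12345678) == 0x1234, and indeed
   0x5678 + 0x1234 * 0x10000 == 0x12345678.  */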
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
175 static void
176 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
178 words[0] = LOWPART (low);
179 words[1] = HIGHPART (low);
180 words[2] = LOWPART (hi);
181 words[3] = HIGHPART (hi);
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
188 static void
189 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
190 HOST_WIDE_INT *hi)
192 *low = words[0] + words[1] * BASE;
193 *hi = words[2] + words[3] * BASE;
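/* A round-trip sketch with hypothetical values, again assuming
   HOST_BITS_PER_WIDE_INT == 32:

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     encode (w, 0xdeadbeef, 0x1234);   // w = {0xbeef, 0xdead, 0x1234, 0}
     decode (w, &lo, &hi);             // lo == 0xdeadbeef, hi == 0x1234

   Each word holds one base-0x10000 digit, which is what lets
   mul_double_with_sign and div_and_round_double below do schoolbook
   arithmetic without overflowing intermediate products.  */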
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
201 int
202 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
203 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
205 unsigned HOST_WIDE_INT low0 = l1;
206 HOST_WIDE_INT high0 = h1;
207 unsigned int prec;
208 int sign_extended_type;
210 if (POINTER_TYPE_P (type)
211 || TREE_CODE (type) == OFFSET_TYPE)
212 prec = POINTER_SIZE;
213 else
214 prec = TYPE_PRECISION (type);
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (type)
218 || (TREE_CODE (type) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type)));
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 else
228 h1 = 0;
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (h1 & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)l1 < 0)
248 h1 = -1;
250 else
252 /* Sign extend bottom half? */
253 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
255 h1 = -1;
256 l1 |= (HOST_WIDE_INT)(-1) << prec;
260 *lv = l1;
261 *hv = h1;
263 /* If the value didn't fit, signal overflow. */
264 return l1 != low0 || h1 != high0;
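/* Sketch for a hypothetical 8-bit signed TYPE (prec == 8):
   fit_double_type (0x1ff, 0, &lv, &hv, type) first masks the low word
   to 0xff, then sign extends because bit 7 is set, leaving the
   double-word value -1 in *LV/*HV.  Since -1 differs from the
   original 0x1ff, the function returns true (overflow).  */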
267 /* We force the double-int HIGH:LOW into the range of the type TYPE by
268 sign- or zero-extending it.
269 OVERFLOWABLE indicates whether we are interested
270 in overflow of the value: when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. We force
273 the value to be within range of TYPE (by setting to 0 or 1 all
274 the bits outside the type's range). We set TREE_OVERFLOW if
275 OVERFLOWED is nonzero,
276 or OVERFLOWABLE is >0 and signed overflow occurs,
277 or OVERFLOWABLE is <0 and any overflow occurs.
278 We return a new tree node for the extended double-int. The node
279 is shared if no overflow flags are set. */
282 tree
283 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
284 HOST_WIDE_INT high, int overflowable,
285 bool overflowed)
287 int sign_extended_type;
288 bool overflow;
290 /* Size types *are* sign extended. */
291 sign_extended_type = (!TYPE_UNSIGNED (type)
292 || (TREE_CODE (type) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type)));
295 overflow = fit_double_type (low, high, &low, &high, type);
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed || overflow)
300 if (overflowed
301 || overflowable < 0
302 || (overflowable > 0 && sign_extended_type))
304 tree t = make_node (INTEGER_CST);
305 TREE_INT_CST_LOW (t) = low;
306 TREE_INT_CST_HIGH (t) = high;
307 TREE_TYPE (t) = type;
308 TREE_OVERFLOW (t) = 1;
309 return t;
313 /* Else build a shared node. */
314 return build_int_cst_wide (type, low, high);
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
323 int
324 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
325 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
326 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
327 bool unsigned_p)
329 unsigned HOST_WIDE_INT l;
330 HOST_WIDE_INT h;
332 l = l1 + l2;
333 h = h1 + h2 + (l < l1);
335 *lv = l;
336 *hv = h;
338 if (unsigned_p)
339 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
340 else
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
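/* The carry out of the low word is detected by the unsigned
   comparison l < l1: wraparound makes a sum smaller than its addend.
   E.g. with hypothetical 32-bit words, 0xffffffff + 2 gives l == 1,
   which is < l1, so 1 is carried into the high word.  */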
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
349 int
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
353 if (l1 == 0)
355 *lv = 0;
356 *hv = - h1;
357 return (*hv & h1) < 0;
359 else
361 *lv = -l1;
362 *hv = ~h1;
363 return 0;
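/* The only signed overflow case is negating the most negative
   double-word value, where l1 == 0 and h1 is the minimum
   HOST_WIDE_INT: there *hv = -h1 wraps back to h1, (*hv & h1) keeps
   the sign bit, and the comparison < 0 reports the overflow.  */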
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
373 int
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
377 bool unsigned_p)
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
383 int i, j, k;
384 unsigned HOST_WIDE_INT toplow, neglow;
385 HOST_WIDE_INT tophigh, neghigh;
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
390 memset (prod, 0, sizeof prod);
392 for (i = 0; i < 4; i++)
394 carry = 0;
395 for (j = 0; j < 4; j++)
397 k = i + j;
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
401 carry += prod[k];
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
405 prod[i + 4] = carry;
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
411 /* Unsigned overflow is immediate. */
412 if (unsigned_p)
413 return (toplow | tophigh) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
417 if (h1 < 0)
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
422 if (h2 < 0)
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
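/* The digit loop above multiplies the operands as if they were
   unsigned; when h1 < 0 the unsigned product is too large by arg2
   scaled into the top double-word (and symmetrically for h2 < 0), so
   -arg2 / -arg1 is added to (toplow, tophigh) to correct it.  Signed
   overflow is then exactly the case where the corrected top
   double-word is not the sign extension of *hv: all zeros for a
   nonnegative result, all ones for a negative one.  */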
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
436 void
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
441 unsigned HOST_WIDE_INT signmask;
443 if (count < 0)
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
446 return;
449 if (SHIFT_COUNT_TRUNCATED)
450 count %= prec;
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
456 *hv = 0;
457 *lv = 0;
459 else if (count >= HOST_BITS_PER_WIDE_INT)
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
462 *lv = 0;
464 else
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
468 *lv = l1 << count;
471 /* Sign extend all bits that are beyond the precision. */
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
485 else
487 *hv = signmask;
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
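/* Note the double shift in the final case above:
   l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1 computes
   l1 >> (HOST_BITS_PER_WIDE_INT - count) while remaining well defined
   when count == 0, where a single shift by the full word width would
   be undefined behavior in C.  rshift_double below uses the
   mirror-image trick for the bits carried down from the high word.  */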
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
498 void
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
502 int arith)
504 unsigned HOST_WIDE_INT signmask;
506 signmask = (arith
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
508 : 0);
510 if (SHIFT_COUNT_TRUNCATED)
511 count %= prec;
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
517 *hv = 0;
518 *lv = 0;
520 else if (count >= HOST_BITS_PER_WIDE_INT)
522 *hv = 0;
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
525 else
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
528 *lv = ((l1 >> count)
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count >= (HOST_WIDE_INT)prec)
536 *hv = signmask;
537 *lv = signmask;
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
546 else
548 *hv = signmask;
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
559 void
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
567 count %= prec;
568 if (count < 0)
569 count += prec;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573 *lv = s1l | s2l;
574 *hv = s1h | s2h;
577 /* Rotate the doubleword integer in L1, H1 right by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
581 void
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
589 count %= prec;
590 if (count < 0)
591 count += prec;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
595 *lv = s1l | s2l;
596 *hv = s1h | s2h;
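/* Both rotates are assembled from two logical shifts on the PREC-bit
   value: rotating left by COUNT is (x << COUNT) | (x >> (PREC - COUNT)),
   so rotating right by COUNT produces the same bits as rotating left
   by PREC - COUNT.  */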
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
603 or EXACT_DIV_EXPR.
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
608 int
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
616 HOST_WIDE_INT *hrem)
618 int quo_neg = 0;
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
621 int i, j;
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
628 int overflow = 0;
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
634 if (!uns)
636 if (hnum < 0)
638 quo_neg = ~ quo_neg;
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
642 overflow = 1;
644 if (hden < 0)
646 quo_neg = ~ quo_neg;
647 neg_double (lden, hden, &lden, &hden);
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
653 *hquo = *hrem = 0;
654 /* This unsigned division rounds toward zero. */
655 *lquo = lnum / lden;
656 goto finish_up;
659 if (hnum == 0)
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
662 *hquo = *lquo = 0;
663 *hrem = hnum;
664 *lrem = lnum;
665 goto finish_up;
668 memset (quo, 0, sizeof quo);
670 memset (num, 0, sizeof num); /* to zero 5th element */
671 memset (den, 0, sizeof den);
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
684 carry = work % lden;
687 else
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
696 if (den[i] != 0)
698 den_hi_sig = i;
699 break;
702 /* Ensure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale = BASE / (den[den_hi_sig] + 1);
706 if (scale > 1)
707 { /* scale divisor and dividend */
708 carry = 0;
709 for (i = 0; i <= 4 - 1; i++)
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
716 num[4] = carry;
717 carry = 0;
718 for (i = 0; i <= 4 - 1; i++)
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
727 num_hi_sig = 4;
729 /* Main loop */
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
741 else
742 quo_est = BASE - 1;
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
746 if (tmp < BASE
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
749 quo_est--;
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
755 carry = 0;
756 for (j = 0; j <= den_hi_sig; j++)
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
769 quo_est--;
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
778 num [num_hi_sig] += carry;
781 /* Store the quotient digit. */
782 quo[i] = quo_est;
786 decode (quo, lquo, hquo);
788 finish_up:
789 /* If result is negative, make it so. */
790 if (quo_neg)
791 neg_double (*lquo, *hquo, lquo, hquo);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
798 switch (code)
800 case TRUNC_DIV_EXPR:
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
803 return overflow;
805 case FLOOR_DIV_EXPR:
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
809 /* quo = quo - 1; */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
811 lquo, hquo);
813 else
814 return overflow;
815 break;
817 case CEIL_DIV_EXPR:
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
822 lquo, hquo);
824 else
825 return overflow;
826 break;
828 case ROUND_DIV_EXPR:
829 case ROUND_MOD_EXPR: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
836 /* Get absolute values. */
837 if (*hrem < 0)
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
839 if (hden < 0)
840 neg_double (lden, hden, &labs_den, &habs_den);
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, &ltwice, &htwice);
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den < ltwice)))
852 if (*hquo < 0)
853 /* quo = quo - 1; */
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
856 else
857 /* quo = quo + 1; */
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
859 lquo, hquo);
861 else
862 return overflow;
864 break;
866 default:
867 gcc_unreachable ();
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
874 return overflow;
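/* A sketch of the rounding adjustments with hypothetical small
   operands 7 / 2 and -7 / 2: the trial quotients are 3 and -3 with
   remainders 1 and -1.  TRUNC_DIV_EXPR keeps them as-is;
   FLOOR_DIV_EXPR turns -3 into -4 (negative quotient, nonzero
   remainder); CEIL_DIV_EXPR turns 3 into 4; and ROUND_DIV_EXPR, since
   2 * |rem| >= |den|, moves both away from zero, giving 4 and -4.  */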
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
881 static tree
882 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
888 tree type = TREE_TYPE (arg1);
889 int uns = TYPE_UNSIGNED (type);
891 int1l = TREE_INT_CST_LOW (arg1);
892 int1h = TREE_INT_CST_HIGH (arg1);
893 int2l = TREE_INT_CST_LOW (arg2);
894 int2h = TREE_INT_CST_HIGH (arg2);
896 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
897 &quol, &quoh, &reml, &remh);
898 if (remh != 0 || reml != 0)
899 return NULL_TREE;
901 return build_int_cst_wide (type, quol, quoh);
904 /* This is non-zero if we should defer warnings about undefined
905 overflow. This facility exists because these warnings are a
906 special case. The code to estimate loop iterations does not want
907 to issue any warnings, since it works with expressions which do not
908 occur in user code. Various bits of cleanup code call fold(), but
909 only use the result if it has certain characteristics (e.g., is a
910 constant); that code only wants to issue a warning if the result is
911 used. */
913 static int fold_deferring_overflow_warnings;
915 /* If a warning about undefined overflow is deferred, this is the
916 warning. Note that this may cause us to turn two warnings into
917 one, but that is fine since it is sufficient to only give one
918 warning per expression. */
920 static const char* fold_deferred_overflow_warning;
922 /* If a warning about undefined overflow is deferred, this is the
923 level at which the warning should be emitted. */
925 static enum warn_strict_overflow_code fold_deferred_overflow_code;
927 /* Start deferring overflow warnings. We could use a stack here to
928 permit nested calls, but at present it is not necessary. */
930 void
931 fold_defer_overflow_warnings (void)
933 ++fold_deferring_overflow_warnings;
936 /* Stop deferring overflow warnings. If there is a pending warning,
937 and ISSUE is true, then issue the warning if appropriate. STMT is
938 the statement with which the warning should be associated (used for
939 location information); STMT may be NULL. CODE is the level of the
940 warning--a warn_strict_overflow_code value. This function will use
941 the smaller of CODE and the deferred code when deciding whether to
942 issue the warning. CODE may be zero to mean to always use the
943 deferred code. */
945 void
946 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
948 const char *warnmsg;
949 location_t locus;
951 gcc_assert (fold_deferring_overflow_warnings > 0);
952 --fold_deferring_overflow_warnings;
953 if (fold_deferring_overflow_warnings > 0)
955 if (fold_deferred_overflow_warning != NULL
956 && code != 0
957 && code < (int) fold_deferred_overflow_code)
958 fold_deferred_overflow_code = code;
959 return;
962 warnmsg = fold_deferred_overflow_warning;
963 fold_deferred_overflow_warning = NULL;
965 if (!issue || warnmsg == NULL)
966 return;
968 /* Use the smallest code level when deciding to issue the
969 warning. */
970 if (code == 0 || code > (int) fold_deferred_overflow_code)
971 code = fold_deferred_overflow_code;
973 if (!issue_strict_overflow_warning (code))
974 return;
976 if (stmt == NULL_TREE || !expr_has_location (stmt))
977 locus = input_location;
978 else
979 locus = expr_location (stmt);
980 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
983 /* Stop deferring overflow warnings, ignoring any deferred
984 warnings. */
986 void
987 fold_undefer_and_ignore_overflow_warnings (void)
989 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
992 /* Whether we are deferring overflow warnings. */
994 bool
995 fold_deferring_overflow_warnings_p (void)
997 return fold_deferring_overflow_warnings > 0;
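/* A sketch of the intended calling pattern (USED_P is a hypothetical
   condition of the caller, not something defined here):

     fold_defer_overflow_warnings ();
     tem = fold_binary (code, type, op0, op1);
     fold_undefer_overflow_warnings (tem != NULL_TREE && used_p,
                                     stmt, WARN_STRICT_OVERFLOW_MISC);

   i.e. fold speculatively, then either issue the one deferred warning
   when the folded result is actually used, or discard it with
   fold_undefer_and_ignore_overflow_warnings.  */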
1000 /* This is called when we fold something based on the fact that signed
1001 overflow is undefined. */
1003 static void
1004 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1006 gcc_assert (!flag_wrapv && !flag_trapv);
1007 if (fold_deferring_overflow_warnings > 0)
1009 if (fold_deferred_overflow_warning == NULL
1010 || wc < fold_deferred_overflow_code)
1012 fold_deferred_overflow_warning = gmsgid;
1013 fold_deferred_overflow_code = wc;
1016 else if (issue_strict_overflow_warning (wc))
1017 warning (OPT_Wstrict_overflow, gmsgid);
1020 /* Return true if the built-in mathematical function specified by CODE
1021 is odd, i.e. -f(x) == f(-x). */
1023 static bool
1024 negate_mathfn_p (enum built_in_function code)
1026 switch (code)
1028 CASE_FLT_FN (BUILT_IN_ASIN):
1029 CASE_FLT_FN (BUILT_IN_ASINH):
1030 CASE_FLT_FN (BUILT_IN_ATAN):
1031 CASE_FLT_FN (BUILT_IN_ATANH):
1032 CASE_FLT_FN (BUILT_IN_CASIN):
1033 CASE_FLT_FN (BUILT_IN_CASINH):
1034 CASE_FLT_FN (BUILT_IN_CATAN):
1035 CASE_FLT_FN (BUILT_IN_CATANH):
1036 CASE_FLT_FN (BUILT_IN_CBRT):
1037 CASE_FLT_FN (BUILT_IN_CPROJ):
1038 CASE_FLT_FN (BUILT_IN_CSIN):
1039 CASE_FLT_FN (BUILT_IN_CSINH):
1040 CASE_FLT_FN (BUILT_IN_CTAN):
1041 CASE_FLT_FN (BUILT_IN_CTANH):
1042 CASE_FLT_FN (BUILT_IN_ERF):
1043 CASE_FLT_FN (BUILT_IN_LLROUND):
1044 CASE_FLT_FN (BUILT_IN_LROUND):
1045 CASE_FLT_FN (BUILT_IN_ROUND):
1046 CASE_FLT_FN (BUILT_IN_SIN):
1047 CASE_FLT_FN (BUILT_IN_SINH):
1048 CASE_FLT_FN (BUILT_IN_TAN):
1049 CASE_FLT_FN (BUILT_IN_TANH):
1050 CASE_FLT_FN (BUILT_IN_TRUNC):
1051 return true;
1053 CASE_FLT_FN (BUILT_IN_LLRINT):
1054 CASE_FLT_FN (BUILT_IN_LRINT):
1055 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1056 CASE_FLT_FN (BUILT_IN_RINT):
1057 return !flag_rounding_math;
1059 default:
1060 break;
1062 return false;
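/* Illustration: sin is odd, so folding -sin (x) into sin (-x) is
   always valid.  rint is odd only in the default rounding mode; under
   upward rounding, rint (0.5) == 1.0 but rint (-0.5) == -0.0, so
   -rint (0.5) != rint (-0.5).  That is why the rint group above also
   requires !flag_rounding_math.  */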
1065 /* Check whether we may negate an integer constant T without causing
1066 overflow. */
1068 bool
1069 may_negate_without_overflow_p (tree t)
1071 unsigned HOST_WIDE_INT val;
1072 unsigned int prec;
1073 tree type;
1075 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1077 type = TREE_TYPE (t);
1078 if (TYPE_UNSIGNED (type))
1079 return false;
1081 prec = TYPE_PRECISION (type);
1082 if (prec > HOST_BITS_PER_WIDE_INT)
1084 if (TREE_INT_CST_LOW (t) != 0)
1085 return true;
1086 prec -= HOST_BITS_PER_WIDE_INT;
1087 val = TREE_INT_CST_HIGH (t);
1089 else
1090 val = TREE_INT_CST_LOW (t);
1091 if (prec < HOST_BITS_PER_WIDE_INT)
1092 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1093 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
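/* Sketch for a hypothetical 16-bit signed type: the only constant
   that cannot be negated is -32768, whose magnitude bit pattern is
   exactly 1 << (prec - 1); the final comparison returns false for it
   and true for every other value.  */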
1096 /* Determine whether an expression T can be cheaply negated using
1097 the function negate_expr without introducing undefined overflow. */
1099 static bool
1100 negate_expr_p (tree t)
1102 tree type;
1104 if (t == 0)
1105 return false;
1107 type = TREE_TYPE (t);
1109 STRIP_SIGN_NOPS (t);
1110 switch (TREE_CODE (t))
1112 case INTEGER_CST:
1113 if (TYPE_OVERFLOW_WRAPS (type))
1114 return true;
1116 /* Check that -CST will not overflow type. */
1117 return may_negate_without_overflow_p (t);
1118 case BIT_NOT_EXPR:
1119 return (INTEGRAL_TYPE_P (type)
1120 && TYPE_OVERFLOW_WRAPS (type));
1122 case REAL_CST:
1123 case NEGATE_EXPR:
1124 return true;
1126 case COMPLEX_CST:
1127 return negate_expr_p (TREE_REALPART (t))
1128 && negate_expr_p (TREE_IMAGPART (t));
1130 case COMPLEX_EXPR:
1131 return negate_expr_p (TREE_OPERAND (t, 0))
1132 && negate_expr_p (TREE_OPERAND (t, 1));
1134 case CONJ_EXPR:
1135 return negate_expr_p (TREE_OPERAND (t, 0));
1137 case PLUS_EXPR:
1138 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1139 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1140 return false;
1141 /* -(A + B) -> (-B) - A. */
1142 if (negate_expr_p (TREE_OPERAND (t, 1))
1143 && reorder_operands_p (TREE_OPERAND (t, 0),
1144 TREE_OPERAND (t, 1)))
1145 return true;
1146 /* -(A + B) -> (-A) - B. */
1147 return negate_expr_p (TREE_OPERAND (t, 0));
1149 case MINUS_EXPR:
1150 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1151 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1152 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1));
1156 case MULT_EXPR:
1157 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1158 break;
1160 /* Fall through. */
1162 case RDIV_EXPR:
1163 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1164 return negate_expr_p (TREE_OPERAND (t, 1))
1165 || negate_expr_p (TREE_OPERAND (t, 0));
1166 break;
1168 case TRUNC_DIV_EXPR:
1169 case ROUND_DIV_EXPR:
1170 case FLOOR_DIV_EXPR:
1171 case CEIL_DIV_EXPR:
1172 case EXACT_DIV_EXPR:
1173 /* In general we can't negate A / B, because if A is INT_MIN and
1174 B is 1, we may turn this into INT_MIN / -1 which is undefined
1175 and actually traps on some architectures. But if overflow is
1176 undefined, we can negate, because - (INT_MIN / 1) is an
1177 overflow. */
1178 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1179 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1180 break;
1181 return negate_expr_p (TREE_OPERAND (t, 1))
1182 || negate_expr_p (TREE_OPERAND (t, 0));
1184 case NOP_EXPR:
1185 /* Negate -((double)float) as (double)(-float). */
1186 if (TREE_CODE (type) == REAL_TYPE)
1188 tree tem = strip_float_extensions (t);
1189 if (tem != t)
1190 return negate_expr_p (tem);
1192 break;
1194 case CALL_EXPR:
1195 /* Negate -f(x) as f(-x). */
1196 if (negate_mathfn_p (builtin_mathfn_code (t)))
1197 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1198 break;
1200 case RSHIFT_EXPR:
1201 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1202 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1204 tree op1 = TREE_OPERAND (t, 1);
1205 if (TREE_INT_CST_HIGH (op1) == 0
1206 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1207 == TREE_INT_CST_LOW (op1))
1208 return true;
1210 break;
1212 default:
1213 break;
1215 return false;
1218 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1219 simplification is possible.
1220 If negate_expr_p would return true for T, NULL_TREE will never be
1221 returned. */
1223 static tree
1224 fold_negate_expr (tree t)
1226 tree type = TREE_TYPE (t);
1227 tree tem;
1229 switch (TREE_CODE (t))
1231 /* Convert - (~A) to A + 1. */
1232 case BIT_NOT_EXPR:
1233 if (INTEGRAL_TYPE_P (type))
1234 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1235 build_int_cst (type, 1));
1236 break;
1238 case INTEGER_CST:
1239 tem = fold_negate_const (t, type);
1240 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1241 || !TYPE_OVERFLOW_TRAPS (type))
1242 return tem;
1243 break;
1245 case REAL_CST:
1246 tem = fold_negate_const (t, type);
1247 /* Two's complement FP formats, such as c4x, may overflow. */
1248 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1249 return tem;
1250 break;
1252 case COMPLEX_CST:
1254 tree rpart = negate_expr (TREE_REALPART (t));
1255 tree ipart = negate_expr (TREE_IMAGPART (t));
1257 if ((TREE_CODE (rpart) == REAL_CST
1258 && TREE_CODE (ipart) == REAL_CST)
1259 || (TREE_CODE (rpart) == INTEGER_CST
1260 && TREE_CODE (ipart) == INTEGER_CST))
1261 return build_complex (type, rpart, ipart);
1263 break;
1265 case COMPLEX_EXPR:
1266 if (negate_expr_p (t))
1267 return fold_build2 (COMPLEX_EXPR, type,
1268 fold_negate_expr (TREE_OPERAND (t, 0)),
1269 fold_negate_expr (TREE_OPERAND (t, 1)));
1270 break;
1272 case CONJ_EXPR:
1273 if (negate_expr_p (t))
1274 return fold_build1 (CONJ_EXPR, type,
1275 fold_negate_expr (TREE_OPERAND (t, 0)));
1276 break;
1278 case NEGATE_EXPR:
1279 return TREE_OPERAND (t, 0);
1281 case PLUS_EXPR:
1282 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1283 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1285 /* -(A + B) -> (-B) - A. */
1286 if (negate_expr_p (TREE_OPERAND (t, 1))
1287 && reorder_operands_p (TREE_OPERAND (t, 0),
1288 TREE_OPERAND (t, 1)))
1290 tem = negate_expr (TREE_OPERAND (t, 1));
1291 return fold_build2 (MINUS_EXPR, type,
1292 tem, TREE_OPERAND (t, 0));
1295 /* -(A + B) -> (-A) - B. */
1296 if (negate_expr_p (TREE_OPERAND (t, 0)))
1298 tem = negate_expr (TREE_OPERAND (t, 0));
1299 return fold_build2 (MINUS_EXPR, type,
1300 tem, TREE_OPERAND (t, 1));
1303 break;
1305 case MINUS_EXPR:
1306 /* - (A - B) -> B - A */
1307 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1308 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1309 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1310 return fold_build2 (MINUS_EXPR, type,
1311 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1312 break;
1314 case MULT_EXPR:
1315 if (TYPE_UNSIGNED (type))
1316 break;
1318 /* Fall through. */
1320 case RDIV_EXPR:
1321 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1323 tem = TREE_OPERAND (t, 1);
1324 if (negate_expr_p (tem))
1325 return fold_build2 (TREE_CODE (t), type,
1326 TREE_OPERAND (t, 0), negate_expr (tem));
1327 tem = TREE_OPERAND (t, 0);
1328 if (negate_expr_p (tem))
1329 return fold_build2 (TREE_CODE (t), type,
1330 negate_expr (tem), TREE_OPERAND (t, 1));
1332 break;
1334 case TRUNC_DIV_EXPR:
1335 case ROUND_DIV_EXPR:
1336 case FLOOR_DIV_EXPR:
1337 case CEIL_DIV_EXPR:
1338 case EXACT_DIV_EXPR:
1339 /* In general we can't negate A / B, because if A is INT_MIN and
1340 B is 1, we may turn this into INT_MIN / -1 which is undefined
1341 and actually traps on some architectures. But if overflow is
1342 undefined, we can negate, because - (INT_MIN / 1) is an
1343 overflow. */
1344 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1346 const char * const warnmsg = G_("assuming signed overflow does not "
1347 "occur when negating a division");
1348 tem = TREE_OPERAND (t, 1);
1349 if (negate_expr_p (tem))
1351 if (INTEGRAL_TYPE_P (type)
1352 && (TREE_CODE (tem) != INTEGER_CST
1353 || integer_onep (tem)))
1354 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1355 return fold_build2 (TREE_CODE (t), type,
1356 TREE_OPERAND (t, 0), negate_expr (tem));
1358 tem = TREE_OPERAND (t, 0);
1359 if (negate_expr_p (tem))
1361 if (INTEGRAL_TYPE_P (type)
1362 && (TREE_CODE (tem) != INTEGER_CST
1363 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1364 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1365 return fold_build2 (TREE_CODE (t), type,
1366 negate_expr (tem), TREE_OPERAND (t, 1));
1369 break;
1371 case NOP_EXPR:
1372 /* Convert -((double)float) into (double)(-float). */
1373 if (TREE_CODE (type) == REAL_TYPE)
1375 tem = strip_float_extensions (t);
1376 if (tem != t && negate_expr_p (tem))
1377 return negate_expr (tem);
1379 break;
1381 case CALL_EXPR:
1382 /* Negate -f(x) as f(-x). */
1383 if (negate_mathfn_p (builtin_mathfn_code (t))
1384 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1386 tree fndecl, arg;
1388 fndecl = get_callee_fndecl (t);
1389 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1390 return build_call_expr (fndecl, 1, arg);
1392 break;
1394 case RSHIFT_EXPR:
1395 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1396 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1398 tree op1 = TREE_OPERAND (t, 1);
1399 if (TREE_INT_CST_HIGH (op1) == 0
1400 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1401 == TREE_INT_CST_LOW (op1))
1403 tree ntype = TYPE_UNSIGNED (type)
1404 ? lang_hooks.types.signed_type (type)
1405 : lang_hooks.types.unsigned_type (type);
1406 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1407 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1408 return fold_convert (type, temp);
1411 break;
1413 default:
1414 break;
1417 return NULL_TREE;
1420 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
1421 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1422 return NULL_TREE. */
1424 static tree
1425 negate_expr (tree t)
1427 tree type, tem;
1429 if (t == NULL_TREE)
1430 return NULL_TREE;
1432 type = TREE_TYPE (t);
1433 STRIP_SIGN_NOPS (t);
1435 tem = fold_negate_expr (t);
1436 if (!tem)
1437 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1438 return fold_convert (type, tem);
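/* Sketch: negate_expr (a + b) with a negatable B goes through the
   PLUS_EXPR case of fold_negate_expr and yields (-b) - a, while
   negate_expr (x) for a bare variable falls back to building
   NEGATE_EXPR <x> and converting back to the original type.  */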
1441 /* Split a tree IN into a constant, literal and variable parts that could be
1442 combined with CODE to make IN. "constant" means an expression with
1443 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1444 commutative arithmetic operation. Store the constant part into *CONP,
1445 the literal in *LITP and return the variable part. If a part isn't
1446 present, set it to null. If the tree does not decompose in this way,
1447 return the entire tree as the variable part and the other parts as null.
1449 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1450 case, we negate an operand that was subtracted. Except if it is a
1451 literal for which we use *MINUS_LITP instead.
1453 If NEGATE_P is true, we are negating all of IN, again except a literal
1454 for which we use *MINUS_LITP instead.
1456 If IN is itself a literal or constant, return it as appropriate.
1458 Note that we do not guarantee that any of the three values will be the
1459 same type as IN, but they will have the same signedness and mode. */
1461 static tree
1462 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1463 tree *minus_litp, int negate_p)
1465 tree var = 0;
1467 *conp = 0;
1468 *litp = 0;
1469 *minus_litp = 0;
1471 /* Strip any conversions that don't change the machine mode or signedness. */
1472 STRIP_SIGN_NOPS (in);
1474 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1475 *litp = in;
1476 else if (TREE_CODE (in) == code
1477 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1478 /* We can associate addition and subtraction together (even
1479 though the C standard doesn't say so) for integers because
1480 the value is not affected. For reals, the value might be
1481 affected, so we can't. */
1482 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1483 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1485 tree op0 = TREE_OPERAND (in, 0);
1486 tree op1 = TREE_OPERAND (in, 1);
1487 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1488 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1490 /* First see if either of the operands is a literal, then a constant. */
1491 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1492 *litp = op0, op0 = 0;
1493 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1494 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1496 if (op0 != 0 && TREE_CONSTANT (op0))
1497 *conp = op0, op0 = 0;
1498 else if (op1 != 0 && TREE_CONSTANT (op1))
1499 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1501 /* If we haven't dealt with either operand, this is not a case we can
1502 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1503 if (op0 != 0 && op1 != 0)
1504 var = in;
1505 else if (op0 != 0)
1506 var = op0;
1507 else
1508 var = op1, neg_var_p = neg1_p;
1510 /* Now do any needed negations. */
1511 if (neg_litp_p)
1512 *minus_litp = *litp, *litp = 0;
1513 if (neg_conp_p)
1514 *conp = negate_expr (*conp);
1515 if (neg_var_p)
1516 var = negate_expr (var);
1518 else if (TREE_CONSTANT (in))
1519 *conp = in;
1520 else
1521 var = in;
1523 if (negate_p)
1525 if (*litp)
1526 *minus_litp = *litp, *litp = 0;
1527 else if (*minus_litp)
1528 *litp = *minus_litp, *minus_litp = 0;
1529 *conp = negate_expr (*conp);
1530 var = negate_expr (var);
1533 return var;
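/* Sketch: splitting IN = a + 5 with CODE == PLUS_EXPR leaves
   *LITP = 5 and *CONP = 0 and returns a; splitting a - 5 stores the 5
   in *MINUS_LITP instead.  For IN = a - b the whole tree is returned
   as the variable part, since both operands survive the literal and
   constant scans.  */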
1536 /* Re-associate trees split by the above function. T1 and T2 are either
1537 expressions to associate or null. Return the new expression, if any. If
1538 we build an operation, do it in TYPE and with CODE. */
1540 static tree
1541 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1543 if (t1 == 0)
1544 return t2;
1545 else if (t2 == 0)
1546 return t1;
1548 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1549 try to fold this since we will have infinite recursion. But do
1550 deal with any NEGATE_EXPRs. */
1551 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1552 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1554 if (code == PLUS_EXPR)
1556 if (TREE_CODE (t1) == NEGATE_EXPR)
1557 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1558 fold_convert (type, TREE_OPERAND (t1, 0)));
1559 else if (TREE_CODE (t2) == NEGATE_EXPR)
1560 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1561 fold_convert (type, TREE_OPERAND (t2, 0)));
1562 else if (integer_zerop (t2))
1563 return fold_convert (type, t1);
1565 else if (code == MINUS_EXPR)
1567 if (integer_zerop (t2))
1568 return fold_convert (type, t1);
1571 return build2 (code, type, fold_convert (type, t1),
1572 fold_convert (type, t2));
1575 return fold_build2 (code, type, fold_convert (type, t1),
1576 fold_convert (type, t2));
1579 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1580 for use in int_const_binop, size_binop and size_diffop. */
1582 static bool
1583 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1585 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1586 return false;
1587 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1588 return false;
1590 switch (code)
1592 case LSHIFT_EXPR:
1593 case RSHIFT_EXPR:
1594 case LROTATE_EXPR:
1595 case RROTATE_EXPR:
1596 return true;
1598 default:
1599 break;
1602 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1603 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1604 && TYPE_MODE (type1) == TYPE_MODE (type2);
1608 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1609 to produce a new constant. Return NULL_TREE if we don't know how
1610 to evaluate CODE at compile-time.
1612 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1614 tree
1615 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1617 unsigned HOST_WIDE_INT int1l, int2l;
1618 HOST_WIDE_INT int1h, int2h;
1619 unsigned HOST_WIDE_INT low;
1620 HOST_WIDE_INT hi;
1621 unsigned HOST_WIDE_INT garbagel;
1622 HOST_WIDE_INT garbageh;
1623 tree t;
1624 tree type = TREE_TYPE (arg1);
1625 int uns = TYPE_UNSIGNED (type);
1626 int is_sizetype
1627 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1628 int overflow = 0;
1630 int1l = TREE_INT_CST_LOW (arg1);
1631 int1h = TREE_INT_CST_HIGH (arg1);
1632 int2l = TREE_INT_CST_LOW (arg2);
1633 int2h = TREE_INT_CST_HIGH (arg2);
1635 switch (code)
1637 case BIT_IOR_EXPR:
1638 low = int1l | int2l, hi = int1h | int2h;
1639 break;
1641 case BIT_XOR_EXPR:
1642 low = int1l ^ int2l, hi = int1h ^ int2h;
1643 break;
1645 case BIT_AND_EXPR:
1646 low = int1l & int2l, hi = int1h & int2h;
1647 break;
1649 case RSHIFT_EXPR:
1650 int2l = -int2l;
1651 case LSHIFT_EXPR:
1652 /* It's unclear from the C standard whether shifts can overflow.
1653 The following code ignores overflow; perhaps a C standard
1654 interpretation ruling is needed. */
1655 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1656 &low, &hi, !uns);
1657 break;
1659 case RROTATE_EXPR:
1660 int2l = - int2l;
1661 case LROTATE_EXPR:
1662 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1663 &low, &hi);
1664 break;
1666 case PLUS_EXPR:
1667 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1668 break;
1670 case MINUS_EXPR:
1671 neg_double (int2l, int2h, &low, &hi);
1672 add_double (int1l, int1h, low, hi, &low, &hi);
1673 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1674 break;
1676 case MULT_EXPR:
1677 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1678 break;
1680 case TRUNC_DIV_EXPR:
1681 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1682 case EXACT_DIV_EXPR:
1683 /* This is a shortcut for a common special case. */
1684 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1685 && !TREE_OVERFLOW (arg1)
1686 && !TREE_OVERFLOW (arg2)
1687 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1689 if (code == CEIL_DIV_EXPR)
1690 int1l += int2l - 1;
1692 low = int1l / int2l, hi = 0;
1693 break;
1696 /* ... fall through ... */
1698 case ROUND_DIV_EXPR:
1699 if (int2h == 0 && int2l == 0)
1700 return NULL_TREE;
1701 if (int2h == 0 && int2l == 1)
1703 low = int1l, hi = int1h;
1704 break;
1706 if (int1l == int2l && int1h == int2h
1707 && ! (int1l == 0 && int1h == 0))
1709 low = 1, hi = 0;
1710 break;
1712 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1713 &low, &hi, &garbagel, &garbageh);
1714 break;
1716 case TRUNC_MOD_EXPR:
1717 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1718 /* This is a shortcut for a common special case. */
1719 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1720 && !TREE_OVERFLOW (arg1)
1721 && !TREE_OVERFLOW (arg2)
1722 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1724 if (code == CEIL_MOD_EXPR)
1725 int1l += int2l - 1;
1726 low = int1l % int2l, hi = 0;
1727 break;
1730 /* ... fall through ... */
1732 case ROUND_MOD_EXPR:
1733 if (int2h == 0 && int2l == 0)
1734 return NULL_TREE;
1735 overflow = div_and_round_double (code, uns,
1736 int1l, int1h, int2l, int2h,
1737 &garbagel, &garbageh, &low, &hi);
1738 break;
1740 case MIN_EXPR:
1741 case MAX_EXPR:
1742 if (uns)
1743 low = (((unsigned HOST_WIDE_INT) int1h
1744 < (unsigned HOST_WIDE_INT) int2h)
1745 || (((unsigned HOST_WIDE_INT) int1h
1746 == (unsigned HOST_WIDE_INT) int2h)
1747 && int1l < int2l));
1748 else
1749 low = (int1h < int2h
1750 || (int1h == int2h && int1l < int2l));
1752 if (low == (code == MIN_EXPR))
1753 low = int1l, hi = int1h;
1754 else
1755 low = int2l, hi = int2h;
1756 break;
1758 default:
1759 return NULL_TREE;
1762 if (notrunc)
1764 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1766 /* Propagate overflow flags ourselves. */
1767 if (((!uns || is_sizetype) && overflow)
1768 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1770 t = copy_node (t);
1771 TREE_OVERFLOW (t) = 1;
1774 else
1775 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1776 ((!uns || is_sizetype) && overflow)
1777 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1779 return t;
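/* Sketch: int_const_binop (PLUS_EXPR, 2, 3, 0) on int constants
   yields the shared INTEGER_CST 5.  With NOTRUNC == 0 the result goes
   through force_fit_type_double, so adding 1 to the maximum signed
   value produces an unshared node with TREE_OVERFLOW set rather than
   a silently wrapped shared constant.  */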
1782 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1783 constant. We assume ARG1 and ARG2 have the same data type, or at least
1784 are the same kind of constant and the same machine mode. Return zero if
1785 combining the constants is not allowed in the current operating mode.
1787 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1789 static tree
1790 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1792 /* Sanity check for the recursive cases. */
1793 if (!arg1 || !arg2)
1794 return NULL_TREE;
1796 STRIP_NOPS (arg1);
1797 STRIP_NOPS (arg2);
1799 if (TREE_CODE (arg1) == INTEGER_CST)
1800 return int_const_binop (code, arg1, arg2, notrunc);
1802 if (TREE_CODE (arg1) == REAL_CST)
1804 enum machine_mode mode;
1805 REAL_VALUE_TYPE d1;
1806 REAL_VALUE_TYPE d2;
1807 REAL_VALUE_TYPE value;
1808 REAL_VALUE_TYPE result;
1809 bool inexact;
1810 tree t, type;
1812 /* The following codes are handled by real_arithmetic. */
1813 switch (code)
1815 case PLUS_EXPR:
1816 case MINUS_EXPR:
1817 case MULT_EXPR:
1818 case RDIV_EXPR:
1819 case MIN_EXPR:
1820 case MAX_EXPR:
1821 break;
1823 default:
1824 return NULL_TREE;
1827 d1 = TREE_REAL_CST (arg1);
1828 d2 = TREE_REAL_CST (arg2);
1830 type = TREE_TYPE (arg1);
1831 mode = TYPE_MODE (type);
1833 /* Don't perform operation if we honor signaling NaNs and
1834 either operand is a NaN. */
1835 if (HONOR_SNANS (mode)
1836 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1837 return NULL_TREE;
1839 /* Don't perform operation if it would raise a division
1840 by zero exception. */
1841 if (code == RDIV_EXPR
1842 && REAL_VALUES_EQUAL (d2, dconst0)
1843 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1844 return NULL_TREE;
1846 /* If either operand is a NaN, just return it. Otherwise, set up
1847 for floating-point trap; we return an overflow. */
1848 if (REAL_VALUE_ISNAN (d1))
1849 return arg1;
1850 else if (REAL_VALUE_ISNAN (d2))
1851 return arg2;
1853 inexact = real_arithmetic (&value, code, &d1, &d2);
1854 real_convert (&result, mode, &value);
1856 /* Don't constant fold this floating point operation if
1857 the result has overflowed and flag_trapping_math. */
1858 if (flag_trapping_math
1859 && MODE_HAS_INFINITIES (mode)
1860 && REAL_VALUE_ISINF (result)
1861 && !REAL_VALUE_ISINF (d1)
1862 && !REAL_VALUE_ISINF (d2))
1863 return NULL_TREE;
1865 /* Don't constant fold this floating point operation if the
1866 result may depend upon the run-time rounding mode and
1867 flag_rounding_math is set, or if GCC's software emulation
1868 is unable to accurately represent the result. */
1869 if ((flag_rounding_math
1870 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1871 && !flag_unsafe_math_optimizations))
1872 && (inexact || !real_identical (&result, &value)))
1873 return NULL_TREE;
1875 t = build_real (type, result);
1877 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1878 return t;
1881 if (TREE_CODE (arg1) == COMPLEX_CST)
1883 tree type = TREE_TYPE (arg1);
1884 tree r1 = TREE_REALPART (arg1);
1885 tree i1 = TREE_IMAGPART (arg1);
1886 tree r2 = TREE_REALPART (arg2);
1887 tree i2 = TREE_IMAGPART (arg2);
1888 tree real, imag;
1890 switch (code)
1892 case PLUS_EXPR:
1893 case MINUS_EXPR:
1894 real = const_binop (code, r1, r2, notrunc);
1895 imag = const_binop (code, i1, i2, notrunc);
1896 break;
1898 case MULT_EXPR:
1899 real = const_binop (MINUS_EXPR,
1900 const_binop (MULT_EXPR, r1, r2, notrunc),
1901 const_binop (MULT_EXPR, i1, i2, notrunc),
1902 notrunc);
1903 imag = const_binop (PLUS_EXPR,
1904 const_binop (MULT_EXPR, r1, i2, notrunc),
1905 const_binop (MULT_EXPR, i1, r2, notrunc),
1906 notrunc);
1907 break;
1909 case RDIV_EXPR:
1911 tree magsquared
1912 = const_binop (PLUS_EXPR,
1913 const_binop (MULT_EXPR, r2, r2, notrunc),
1914 const_binop (MULT_EXPR, i2, i2, notrunc),
1915 notrunc);
1916 tree t1
1917 = const_binop (PLUS_EXPR,
1918 const_binop (MULT_EXPR, r1, r2, notrunc),
1919 const_binop (MULT_EXPR, i1, i2, notrunc),
1920 notrunc);
1921 tree t2
1922 = const_binop (MINUS_EXPR,
1923 const_binop (MULT_EXPR, i1, r2, notrunc),
1924 const_binop (MULT_EXPR, r1, i2, notrunc),
1925 notrunc);
1927 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1928 code = TRUNC_DIV_EXPR;
1930 real = const_binop (code, t1, magsquared, notrunc);
1931 imag = const_binop (code, t2, magsquared, notrunc);
1933 break;
1935 default:
1936 return NULL_TREE;
1939 if (real && imag)
1940 return build_complex (type, real, imag);
1943 return NULL_TREE;
1946 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1947 indicates which particular sizetype to create. */
1949 tree
1950 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1952 return build_int_cst (sizetype_tab[(int) kind], number);
1955 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1956 is a tree code. The type of the result is taken from the operands.
1957 Both must be equivalent integer types, in the sense of int_binop_types_match_p.
1958 If the operands are constant, so is the result. */
1960 tree
1961 size_binop (enum tree_code code, tree arg0, tree arg1)
1963 tree type = TREE_TYPE (arg0);
1965 if (arg0 == error_mark_node || arg1 == error_mark_node)
1966 return error_mark_node;
1968 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1969 TREE_TYPE (arg1)));
1971 /* Handle the special case of two integer constants faster. */
1972 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1974 /* And some specific cases even faster than that. */
1975 if (code == PLUS_EXPR)
1977 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1978 return arg1;
1979 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1980 return arg0;
1982 else if (code == MINUS_EXPR)
1984 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1985 return arg0;
1987 else if (code == MULT_EXPR)
1989 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1990 return arg1;
1993 /* Handle general case of two integer constants. */
1994 return int_const_binop (code, arg0, arg1, 0);
1997 return fold_build2 (code, type, arg0, arg1);
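/* A small usage sketch (TYPE here is a hypothetical type node):
     tree off = size_binop (PLUS_EXPR, size_int (4),
                            TYPE_SIZE_UNIT (type));
   folds directly to a sizetype INTEGER_CST via the fast paths above
   when TYPE_SIZE_UNIT (type) is constant, and otherwise falls
   through to fold_build2 to build the PLUS_EXPR.  */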
2000 /* Given two values, either both of sizetype or both of bitsizetype,
2001 compute the difference between the two values. Return the value
2002 in signed type corresponding to the type of the operands. */
2004 tree
2005 size_diffop (tree arg0, tree arg1)
2007 tree type = TREE_TYPE (arg0);
2008 tree ctype;
2010 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2011 TREE_TYPE (arg1)));
2013 /* If the type is already signed, just do the simple thing. */
2014 if (!TYPE_UNSIGNED (type))
2015 return size_binop (MINUS_EXPR, arg0, arg1);
2017 if (type == sizetype)
2018 ctype = ssizetype;
2019 else if (type == bitsizetype)
2020 ctype = sbitsizetype;
2021 else
2022 ctype = lang_hooks.types.signed_type (type);
2024 /* If either operand is not a constant, do the conversions to the signed
2025 type and subtract. The hardware will do the right thing with any
2026 overflow in the subtraction. */
2027 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2028 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2029 fold_convert (ctype, arg1));
2031 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2032 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2033 overflow) and negate (which can't either). Special-case a result
2034 of zero while we're here. */
2035 if (tree_int_cst_equal (arg0, arg1))
2036 return build_int_cst (ctype, 0);
2037 else if (tree_int_cst_lt (arg1, arg0))
2038 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2039 else
2040 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2041 fold_convert (ctype, size_binop (MINUS_EXPR,
2042 arg1, arg0)));
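/* For example, size_diffop (size_int (4), size_int (8)) notices that
   ARG1 is the larger constant, subtracts the other way, and returns
   -4 as an ssizetype constant instead of the huge positive value a
   plain unsigned subtraction would wrap to.  */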
2045 /* A subroutine of fold_convert_const handling conversions of an
2046 INTEGER_CST to another integer type. */
2048 static tree
2049 fold_convert_const_int_from_int (tree type, tree arg1)
2051 tree t;
2053 /* Given an integer constant, make new constant with new type,
2054 appropriately sign-extended or truncated. */
2055 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2056 TREE_INT_CST_HIGH (arg1),
2057 /* Don't set the overflow when
2058 converting a pointer. */
2059 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2060 (TREE_INT_CST_HIGH (arg1) < 0
2061 && (TYPE_UNSIGNED (type)
2062 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2063 | TREE_OVERFLOW (arg1));
2065 return t;
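/* For example, converting the int constant 300 to unsigned char
   through this routine truncates it to 44 (300 mod 256), while
   converting a negative constant to a wider unsigned type
   sign-extends first; any overflow flag on ARG1 is propagated
   into T.  */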
2068 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2069 to an integer type. */
2071 static tree
2072 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2074 int overflow = 0;
2075 tree t;
2077 /* The following code implements the floating point to integer
2078 conversion rules required by the Java Language Specification,
2079 that IEEE NaNs are mapped to zero and values that overflow
2080 the target precision saturate, i.e. values greater than
2081 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2082 are mapped to INT_MIN. These semantics are allowed by the
2083 C and C++ standards that simply state that the behavior of
2084 FP-to-integer conversion is unspecified upon overflow. */
2086 HOST_WIDE_INT high, low;
2087 REAL_VALUE_TYPE r;
2088 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2090 switch (code)
2092 case FIX_TRUNC_EXPR:
2093 real_trunc (&r, VOIDmode, &x);
2094 break;
2096 default:
2097 gcc_unreachable ();
2100 /* If R is NaN, return zero and show we have an overflow. */
2101 if (REAL_VALUE_ISNAN (r))
2103 overflow = 1;
2104 high = 0;
2105 low = 0;
2108 /* See if R is less than the lower bound or greater than the
2109 upper bound. */
2111 if (! overflow)
2113 tree lt = TYPE_MIN_VALUE (type);
2114 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2115 if (REAL_VALUES_LESS (r, l))
2117 overflow = 1;
2118 high = TREE_INT_CST_HIGH (lt);
2119 low = TREE_INT_CST_LOW (lt);
2123 if (! overflow)
2125 tree ut = TYPE_MAX_VALUE (type);
2126 if (ut)
2128 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2129 if (REAL_VALUES_LESS (u, r))
2131 overflow = 1;
2132 high = TREE_INT_CST_HIGH (ut);
2133 low = TREE_INT_CST_LOW (ut);
2138 if (! overflow)
2139 REAL_VALUE_TO_INT (&low, &high, r);
2141 t = force_fit_type_double (type, low, high, -1,
2142 overflow | TREE_OVERFLOW (arg1));
2143 return t;
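/* Worked examples of the saturating semantics described above:
   (int) 3.7 truncates to 3; (int) 1.0e30 is out of range and yields
   INT_MAX with TREE_OVERFLOW set; (int) NaN yields 0, likewise with
   TREE_OVERFLOW set.  */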
2146 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2147 to another floating point type. */
2149 static tree
2150 fold_convert_const_real_from_real (tree type, tree arg1)
2152 REAL_VALUE_TYPE value;
2153 tree t;
2155 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2156 t = build_real (type, value);
2158 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2159 return t;
2162 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2163 type TYPE. If no simplification can be done return NULL_TREE. */
2165 static tree
2166 fold_convert_const (enum tree_code code, tree type, tree arg1)
2168 if (TREE_TYPE (arg1) == type)
2169 return arg1;
2171 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2173 if (TREE_CODE (arg1) == INTEGER_CST)
2174 return fold_convert_const_int_from_int (type, arg1);
2175 else if (TREE_CODE (arg1) == REAL_CST)
2176 return fold_convert_const_int_from_real (code, type, arg1);
2178 else if (TREE_CODE (type) == REAL_TYPE)
2180 if (TREE_CODE (arg1) == INTEGER_CST)
2181 return build_real_from_int_cst (type, arg1);
2182 if (TREE_CODE (arg1) == REAL_CST)
2183 return fold_convert_const_real_from_real (type, arg1);
2185 return NULL_TREE;
2188 /* Construct a vector of zero elements of vector type TYPE. */
2190 static tree
2191 build_zero_vector (tree type)
2193 tree elem, list;
2194 int i, units;
2196 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2197 units = TYPE_VECTOR_SUBPARTS (type);
2199 list = NULL_TREE;
2200 for (i = 0; i < units; i++)
2201 list = tree_cons (NULL_TREE, elem, list);
2202 return build_vector (type, list);
2205 /* Convert expression ARG to type TYPE. Used by the middle-end for
2206 simple conversions in preference to calling the front-end's convert. */
2208 tree
2209 fold_convert (tree type, tree arg)
2211 tree orig = TREE_TYPE (arg);
2212 tree tem;
2214 if (type == orig)
2215 return arg;
2217 if (TREE_CODE (arg) == ERROR_MARK
2218 || TREE_CODE (type) == ERROR_MARK
2219 || TREE_CODE (orig) == ERROR_MARK)
2220 return error_mark_node;
2222 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2223 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2224 TYPE_MAIN_VARIANT (orig)))
2225 return fold_build1 (NOP_EXPR, type, arg);
2227 switch (TREE_CODE (type))
2229 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2230 case POINTER_TYPE: case REFERENCE_TYPE:
2231 case OFFSET_TYPE:
2232 if (TREE_CODE (arg) == INTEGER_CST)
2234 tem = fold_convert_const (NOP_EXPR, type, arg);
2235 if (tem != NULL_TREE)
2236 return tem;
2238 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2239 || TREE_CODE (orig) == OFFSET_TYPE)
2240 return fold_build1 (NOP_EXPR, type, arg);
2241 if (TREE_CODE (orig) == COMPLEX_TYPE)
2243 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2244 return fold_convert (type, tem);
2246 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2247 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2248 return fold_build1 (NOP_EXPR, type, arg);
2250 case REAL_TYPE:
2251 if (TREE_CODE (arg) == INTEGER_CST)
2253 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2257 else if (TREE_CODE (arg) == REAL_CST)
2259 tem = fold_convert_const (NOP_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2261 return tem;
2264 switch (TREE_CODE (orig))
2266 case INTEGER_TYPE:
2267 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2268 case POINTER_TYPE: case REFERENCE_TYPE:
2269 return fold_build1 (FLOAT_EXPR, type, arg);
2271 case REAL_TYPE:
2272 return fold_build1 (NOP_EXPR, type, arg);
2274 case COMPLEX_TYPE:
2275 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2276 return fold_convert (type, tem);
2278 default:
2279 gcc_unreachable ();
2282 case COMPLEX_TYPE:
2283 switch (TREE_CODE (orig))
2285 case INTEGER_TYPE:
2286 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2287 case POINTER_TYPE: case REFERENCE_TYPE:
2288 case REAL_TYPE:
2289 return build2 (COMPLEX_EXPR, type,
2290 fold_convert (TREE_TYPE (type), arg),
2291 fold_convert (TREE_TYPE (type), integer_zero_node));
2292 case COMPLEX_TYPE:
2294 tree rpart, ipart;
2296 if (TREE_CODE (arg) == COMPLEX_EXPR)
2298 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2299 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2300 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2303 arg = save_expr (arg);
2304 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2305 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2306 rpart = fold_convert (TREE_TYPE (type), rpart);
2307 ipart = fold_convert (TREE_TYPE (type), ipart);
2308 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2311 default:
2312 gcc_unreachable ();
2315 case VECTOR_TYPE:
2316 if (integer_zerop (arg))
2317 return build_zero_vector (type);
2318 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2319 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2320 || TREE_CODE (orig) == VECTOR_TYPE);
2321 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2323 case VOID_TYPE:
2324 tem = fold_ignored_result (arg);
2325 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2326 return tem;
2327 return fold_build1 (NOP_EXPR, type, tem);
2329 default:
2330 gcc_unreachable ();
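/* For instance, converting a hypothetical integer expression I with
   fold_convert (complex_double_type_node, I) takes the COMPLEX_TYPE
   arm above and builds COMPLEX_EXPR <(double) I, 0.0>, while a
   complex-to-complex conversion converts the real and imaginary
   parts separately.  */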
2334 /* Return false if expr can be assumed not to be an lvalue, true
2335 otherwise. */
2337 static bool
2338 maybe_lvalue_p (tree x)
2340 /* We only need to wrap lvalue tree codes. */
2341 switch (TREE_CODE (x))
2343 case VAR_DECL:
2344 case PARM_DECL:
2345 case RESULT_DECL:
2346 case LABEL_DECL:
2347 case FUNCTION_DECL:
2348 case SSA_NAME:
2350 case COMPONENT_REF:
2351 case INDIRECT_REF:
2352 case ALIGN_INDIRECT_REF:
2353 case MISALIGNED_INDIRECT_REF:
2354 case ARRAY_REF:
2355 case ARRAY_RANGE_REF:
2356 case BIT_FIELD_REF:
2357 case OBJ_TYPE_REF:
2359 case REALPART_EXPR:
2360 case IMAGPART_EXPR:
2361 case PREINCREMENT_EXPR:
2362 case PREDECREMENT_EXPR:
2363 case SAVE_EXPR:
2364 case TRY_CATCH_EXPR:
2365 case WITH_CLEANUP_EXPR:
2366 case COMPOUND_EXPR:
2367 case MODIFY_EXPR:
2368 case GIMPLE_MODIFY_STMT:
2369 case TARGET_EXPR:
2370 case COND_EXPR:
2371 case BIND_EXPR:
2372 case MIN_EXPR:
2373 case MAX_EXPR:
2374 break;
2376 default:
2377 /* Assume the worst for front-end tree codes. */
2378 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2379 break;
2380 return false;
2383 return true;
2386 /* Return an expr equal to X but certainly not valid as an lvalue. */
2388 tree
2389 non_lvalue (tree x)
2391 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2392 us. */
2393 if (in_gimple_form)
2394 return x;
2396 if (! maybe_lvalue_p (x))
2397 return x;
2398 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2401 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2402 Zero means allow extended lvalues. */
2404 int pedantic_lvalues;
2406 /* When pedantic, return an expr equal to X but certainly not valid as a
2407 pedantic lvalue. Otherwise, return X. */
2409 static tree
2410 pedantic_non_lvalue (tree x)
2412 if (pedantic_lvalues)
2413 return non_lvalue (x);
2414 else
2415 return x;
2418 /* Given a tree comparison code, return the code that is the logical inverse
2419 of the given code. It is not safe to do this for floating-point
2420 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive the
2421 HONOR_NANS flag: if reversing the comparison is unsafe, return ERROR_MARK. */
2423 enum tree_code
2424 invert_tree_comparison (enum tree_code code, bool honor_nans)
2426 if (honor_nans && flag_trapping_math)
2427 return ERROR_MARK;
2429 switch (code)
2431 case EQ_EXPR:
2432 return NE_EXPR;
2433 case NE_EXPR:
2434 return EQ_EXPR;
2435 case GT_EXPR:
2436 return honor_nans ? UNLE_EXPR : LE_EXPR;
2437 case GE_EXPR:
2438 return honor_nans ? UNLT_EXPR : LT_EXPR;
2439 case LT_EXPR:
2440 return honor_nans ? UNGE_EXPR : GE_EXPR;
2441 case LE_EXPR:
2442 return honor_nans ? UNGT_EXPR : GT_EXPR;
2443 case LTGT_EXPR:
2444 return UNEQ_EXPR;
2445 case UNEQ_EXPR:
2446 return LTGT_EXPR;
2447 case UNGT_EXPR:
2448 return LE_EXPR;
2449 case UNGE_EXPR:
2450 return LT_EXPR;
2451 case UNLT_EXPR:
2452 return GE_EXPR;
2453 case UNLE_EXPR:
2454 return GT_EXPR;
2455 case ORDERED_EXPR:
2456 return UNORDERED_EXPR;
2457 case UNORDERED_EXPR:
2458 return ORDERED_EXPR;
2459 default:
2460 gcc_unreachable ();
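/* Example: when NaNs are honored, the inverse of a < b is a UNGE b,
   since the inverted test must be true when either operand is a NaN;
   returning plain GE_EXPR there would change the result for
   unordered operands.  Without NaNs, LT_EXPR simply inverts to
   GE_EXPR.  */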
2464 /* Similar, but return the comparison that results if the operands are
2465 swapped. This is safe for floating-point. */
2467 enum tree_code
2468 swap_tree_comparison (enum tree_code code)
2470 switch (code)
2472 case EQ_EXPR:
2473 case NE_EXPR:
2474 case ORDERED_EXPR:
2475 case UNORDERED_EXPR:
2476 case LTGT_EXPR:
2477 case UNEQ_EXPR:
2478 return code;
2479 case GT_EXPR:
2480 return LT_EXPR;
2481 case GE_EXPR:
2482 return LE_EXPR;
2483 case LT_EXPR:
2484 return GT_EXPR;
2485 case LE_EXPR:
2486 return GE_EXPR;
2487 case UNGT_EXPR:
2488 return UNLT_EXPR;
2489 case UNGE_EXPR:
2490 return UNLE_EXPR;
2491 case UNLT_EXPR:
2492 return UNGT_EXPR;
2493 case UNLE_EXPR:
2494 return UNGE_EXPR;
2495 default:
2496 gcc_unreachable ();
2501 /* Convert a comparison tree code from an enum tree_code representation
2502 into a compcode bit-based encoding. This function is the inverse of
2503 compcode_to_comparison. */
2505 static enum comparison_code
2506 comparison_to_compcode (enum tree_code code)
2508 switch (code)
2510 case LT_EXPR:
2511 return COMPCODE_LT;
2512 case EQ_EXPR:
2513 return COMPCODE_EQ;
2514 case LE_EXPR:
2515 return COMPCODE_LE;
2516 case GT_EXPR:
2517 return COMPCODE_GT;
2518 case NE_EXPR:
2519 return COMPCODE_NE;
2520 case GE_EXPR:
2521 return COMPCODE_GE;
2522 case ORDERED_EXPR:
2523 return COMPCODE_ORD;
2524 case UNORDERED_EXPR:
2525 return COMPCODE_UNORD;
2526 case UNLT_EXPR:
2527 return COMPCODE_UNLT;
2528 case UNEQ_EXPR:
2529 return COMPCODE_UNEQ;
2530 case UNLE_EXPR:
2531 return COMPCODE_UNLE;
2532 case UNGT_EXPR:
2533 return COMPCODE_UNGT;
2534 case LTGT_EXPR:
2535 return COMPCODE_LTGT;
2536 case UNGE_EXPR:
2537 return COMPCODE_UNGE;
2538 default:
2539 gcc_unreachable ();
2543 /* Convert a compcode bit-based encoding of a comparison operator back
2544 to GCC's enum tree_code representation. This function is the
2545 inverse of comparison_to_compcode. */
2547 static enum tree_code
2548 compcode_to_comparison (enum comparison_code code)
2550 switch (code)
2552 case COMPCODE_LT:
2553 return LT_EXPR;
2554 case COMPCODE_EQ:
2555 return EQ_EXPR;
2556 case COMPCODE_LE:
2557 return LE_EXPR;
2558 case COMPCODE_GT:
2559 return GT_EXPR;
2560 case COMPCODE_NE:
2561 return NE_EXPR;
2562 case COMPCODE_GE:
2563 return GE_EXPR;
2564 case COMPCODE_ORD:
2565 return ORDERED_EXPR;
2566 case COMPCODE_UNORD:
2567 return UNORDERED_EXPR;
2568 case COMPCODE_UNLT:
2569 return UNLT_EXPR;
2570 case COMPCODE_UNEQ:
2571 return UNEQ_EXPR;
2572 case COMPCODE_UNLE:
2573 return UNLE_EXPR;
2574 case COMPCODE_UNGT:
2575 return UNGT_EXPR;
2576 case COMPCODE_LTGT:
2577 return LTGT_EXPR;
2578 case COMPCODE_UNGE:
2579 return UNGE_EXPR;
2580 default:
2581 gcc_unreachable ();
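/* The bit-based encoding makes both mappings above mechanical: the
   LT, EQ, GT and UNORD bits compose, e.g.
     COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ
     COMPCODE_NE == COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
   which is what allows combine_comparisons below to AND or OR two
   comparison codes directly.  */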
2585 /* Return a tree for the comparison which is the combination of
2586 doing the AND or OR (depending on CODE) of the two operations LCODE
2587 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2588 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2589 if this makes the transformation invalid. */
2591 tree
2592 combine_comparisons (enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2596 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 enum comparison_code compcode;
2601 switch (code)
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2605 break;
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2609 break;
2611 default:
2612 return NULL_TREE;
2615 if (!honor_nans)
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2625 else if (flag_trapping_math)
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2647 rtrap = false;
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2651 if (rtrap && !ltrap
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2653 return NULL_TREE;
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
2657 return NULL_TREE;
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2664 else
2665 return fold_build2 (compcode_to_comparison (compcode),
2666 truth_type, ll_arg, lr_arg);
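/* A worked example of the combination above: for
     (a < b) || (a == b)
   LCOMPCODE is COMPCODE_LT and RCOMPCODE is COMPCODE_EQ, so the OR
   yields COMPCODE_LE and the whole expression folds to a <= b
   (assuming the operand type need not honor NaNs).  */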
2669 /* Return nonzero if CODE is a tree code that represents a truth value. */
2671 static int
2672 truth_value_p (enum tree_code code)
2674 return (TREE_CODE_CLASS (code) == tcc_comparison
2675 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2676 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2677 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2680 /* Return nonzero if two operands (typically of the same tree node)
2681 are necessarily equal. If either argument has side-effects this
2682 function returns zero. FLAGS modifies behavior as follows:
2684 If OEP_ONLY_CONST is set, only return nonzero for constants.
2685 This function tests whether the operands are indistinguishable;
2686 it does not test whether they are equal using C's == operation.
2687 The distinction is important for IEEE floating point, because
2688 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2689 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2691 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2692 even though it may hold multiple values during a function.
2693 This is because a GCC tree node guarantees that nothing else is
2694 executed between the evaluation of its "operands" (which may often
2695 be evaluated in arbitrary order). Hence if the operands themselves
2696 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2697 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2698 unset means assuming isochronic (or instantaneous) tree equivalence.
2699 Unless comparing arbitrary expression trees, such as from different
2700 statements, this flag can usually be left unset.
2702 If OEP_PURE_SAME is set, then pure functions with identical arguments
2703 are considered the same. It is used when the caller has other ways
2704 to ensure that global memory is unchanged in between. */
2706 int
2707 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2709 /* If either is ERROR_MARK, they aren't equal. */
2710 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2711 return 0;
2713 /* If both types don't have the same signedness, then we can't consider
2714 them equal. We must check this before the STRIP_NOPS calls
2715 because they may change the signedness of the arguments. */
2716 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2717 return 0;
2719 /* If both types don't have the same precision, then it is not safe
2720 to strip NOPs. */
2721 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2722 return 0;
2724 STRIP_NOPS (arg0);
2725 STRIP_NOPS (arg1);
2727 /* In case both args are comparisons but with different comparison
2728 code, try to swap the comparison operands of one arg to produce
2729 a match and compare that variant. */
2730 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2731 && COMPARISON_CLASS_P (arg0)
2732 && COMPARISON_CLASS_P (arg1))
2734 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2736 if (TREE_CODE (arg0) == swap_code)
2737 return operand_equal_p (TREE_OPERAND (arg0, 0),
2738 TREE_OPERAND (arg1, 1), flags)
2739 && operand_equal_p (TREE_OPERAND (arg0, 1),
2740 TREE_OPERAND (arg1, 0), flags);
2743 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2744 /* This is needed for conversions and for COMPONENT_REF.
2745 Might as well play it safe and always test this. */
2746 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2747 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2748 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2749 return 0;
2751 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2752 We don't care about side effects in that case because the SAVE_EXPR
2753 takes care of that for us. In all other cases, two expressions are
2754 equal if they have no side effects. If we have two identical
2755 expressions with side effects that should be treated the same due
2756 to the only side effects being identical SAVE_EXPR's, that will
2757 be detected in the recursive calls below. */
2758 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2759 && (TREE_CODE (arg0) == SAVE_EXPR
2760 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2761 return 1;
2763 /* Next handle constant cases, those for which we can return 1 even
2764 if ONLY_CONST is set. */
2765 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2766 switch (TREE_CODE (arg0))
2768 case INTEGER_CST:
2769 return tree_int_cst_equal (arg0, arg1);
2771 case REAL_CST:
2772 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2773 TREE_REAL_CST (arg1)))
2774 return 1;
2777 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2779 /* If we do not distinguish between signed and unsigned zero,
2780 consider them equal. */
2781 if (real_zerop (arg0) && real_zerop (arg1))
2782 return 1;
2784 return 0;
2786 case VECTOR_CST:
2788 tree v1, v2;
2790 v1 = TREE_VECTOR_CST_ELTS (arg0);
2791 v2 = TREE_VECTOR_CST_ELTS (arg1);
2792 while (v1 && v2)
2794 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2795 flags))
2796 return 0;
2797 v1 = TREE_CHAIN (v1);
2798 v2 = TREE_CHAIN (v2);
2801 return v1 == v2;
2804 case COMPLEX_CST:
2805 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2806 flags)
2807 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2808 flags));
2810 case STRING_CST:
2811 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2812 && ! memcmp (TREE_STRING_POINTER (arg0),
2813 TREE_STRING_POINTER (arg1),
2814 TREE_STRING_LENGTH (arg0)));
2816 case ADDR_EXPR:
2817 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2818 0);
2819 default:
2820 break;
2823 if (flags & OEP_ONLY_CONST)
2824 return 0;
2826 /* Define macros to test an operand from arg0 and arg1 for equality and a
2827 variant that allows null and views null as being different from any
2828 non-null value. In the latter case, if either is null, then both
2829 must be; otherwise, do the normal comparison. */
2830 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2831 TREE_OPERAND (arg1, N), flags)
2833 #define OP_SAME_WITH_NULL(N) \
2834 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2835 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2837 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2839 case tcc_unary:
2840 /* Two conversions are equal only if signedness and modes match. */
2841 switch (TREE_CODE (arg0))
2843 case NOP_EXPR:
2844 case CONVERT_EXPR:
2845 case FIX_TRUNC_EXPR:
2846 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2847 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2848 return 0;
2849 break;
2850 default:
2851 break;
2854 return OP_SAME (0);
2857 case tcc_comparison:
2858 case tcc_binary:
2859 if (OP_SAME (0) && OP_SAME (1))
2860 return 1;
2862 /* For commutative ops, allow the other order. */
2863 return (commutative_tree_code (TREE_CODE (arg0))
2864 && operand_equal_p (TREE_OPERAND (arg0, 0),
2865 TREE_OPERAND (arg1, 1), flags)
2866 && operand_equal_p (TREE_OPERAND (arg0, 1),
2867 TREE_OPERAND (arg1, 0), flags));
2869 case tcc_reference:
2870 /* If either of the pointer (or reference) expressions we are
2871 dereferencing contains a side effect, these cannot be equal. */
2872 if (TREE_SIDE_EFFECTS (arg0)
2873 || TREE_SIDE_EFFECTS (arg1))
2874 return 0;
2876 switch (TREE_CODE (arg0))
2878 case INDIRECT_REF:
2879 case ALIGN_INDIRECT_REF:
2880 case MISALIGNED_INDIRECT_REF:
2881 case REALPART_EXPR:
2882 case IMAGPART_EXPR:
2883 return OP_SAME (0);
2885 case ARRAY_REF:
2886 case ARRAY_RANGE_REF:
2887 /* Operands 2 and 3 may be null. */
2888 return (OP_SAME (0)
2889 && OP_SAME (1)
2890 && OP_SAME_WITH_NULL (2)
2891 && OP_SAME_WITH_NULL (3));
2893 case COMPONENT_REF:
2894 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2895 may be NULL when we're called to compare MEM_EXPRs. */
2896 return OP_SAME_WITH_NULL (0)
2897 && OP_SAME (1)
2898 && OP_SAME_WITH_NULL (2);
2900 case BIT_FIELD_REF:
2901 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2903 default:
2904 return 0;
2907 case tcc_expression:
2908 switch (TREE_CODE (arg0))
2910 case ADDR_EXPR:
2911 case TRUTH_NOT_EXPR:
2912 return OP_SAME (0);
2914 case TRUTH_ANDIF_EXPR:
2915 case TRUTH_ORIF_EXPR:
2916 return OP_SAME (0) && OP_SAME (1);
2918 case TRUTH_AND_EXPR:
2919 case TRUTH_OR_EXPR:
2920 case TRUTH_XOR_EXPR:
2921 if (OP_SAME (0) && OP_SAME (1))
2922 return 1;
2924 /* Otherwise take into account this is a commutative operation. */
2925 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2926 TREE_OPERAND (arg1, 1), flags)
2927 && operand_equal_p (TREE_OPERAND (arg0, 1),
2928 TREE_OPERAND (arg1, 0), flags));
2930 default:
2931 return 0;
2934 case tcc_vl_exp:
2935 switch (TREE_CODE (arg0))
2937 case CALL_EXPR:
2938 /* If the CALL_EXPRs call different functions, then they
2939 clearly cannot be equal. */
2940 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2941 flags))
2942 return 0;
2945 unsigned int cef = call_expr_flags (arg0);
2946 if (flags & OEP_PURE_SAME)
2947 cef &= ECF_CONST | ECF_PURE;
2948 else
2949 cef &= ECF_CONST;
2950 if (!cef)
2951 return 0;
2954 /* Now see if all the arguments are the same. */
2956 call_expr_arg_iterator iter0, iter1;
2957 tree a0, a1;
2958 for (a0 = first_call_expr_arg (arg0, &iter0),
2959 a1 = first_call_expr_arg (arg1, &iter1);
2960 a0 && a1;
2961 a0 = next_call_expr_arg (&iter0),
2962 a1 = next_call_expr_arg (&iter1))
2963 if (! operand_equal_p (a0, a1, flags))
2964 return 0;
2966 /* If we get here and both argument lists are exhausted
2967 then the CALL_EXPRs are equal. */
2968 return ! (a0 || a1);
2970 default:
2971 return 0;
2974 case tcc_declaration:
2975 /* Consider __builtin_sqrt equal to sqrt. */
2976 return (TREE_CODE (arg0) == FUNCTION_DECL
2977 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2978 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2979 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2981 default:
2982 return 0;
2985 #undef OP_SAME
2986 #undef OP_SAME_WITH_NULL
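/* For example, operand_equal_p regards a + b and b + a as equal via
   the commutativity check above, treats the REAL_CSTs 0.0 and -0.0
   as equal only when the mode does not honor signed zeros, and with
   OEP_ONLY_CONST set reports any non-constant operands as
   unequal.  */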
2989 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2990 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2992 When in doubt, return 0. */
2994 static int
2995 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2997 int unsignedp1, unsignedpo;
2998 tree primarg0, primarg1, primother;
2999 unsigned int correct_width;
3001 if (operand_equal_p (arg0, arg1, 0))
3002 return 1;
3004 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3005 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3006 return 0;
3008 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3009 and see if the inner values are the same. This removes any
3010 signedness comparison, which doesn't matter here. */
3011 primarg0 = arg0, primarg1 = arg1;
3012 STRIP_NOPS (primarg0);
3013 STRIP_NOPS (primarg1);
3014 if (operand_equal_p (primarg0, primarg1, 0))
3015 return 1;
3017 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3018 actual comparison operand, ARG0.
3020 First throw away any conversions to wider types
3021 already present in the operands. */
3023 primarg1 = get_narrower (arg1, &unsignedp1);
3024 primother = get_narrower (other, &unsignedpo);
3026 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3027 if (unsignedp1 == unsignedpo
3028 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3029 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3031 tree type = TREE_TYPE (arg0);
3033 /* Make sure shorter operand is extended the right way
3034 to match the longer operand. */
3035 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
3036 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3038 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3039 return 1;
3042 return 0;
3045 /* See if ARG is an expression that is either a comparison or is performing
3046 arithmetic on comparisons. The comparisons must only be comparing
3047 two different values, which will be stored in *CVAL1 and *CVAL2; if
3048 they are nonzero it means that some operands have already been found.
3049 No variables may be used anywhere else in the expression except in the
3050 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3051 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3053 If this is true, return 1. Otherwise, return zero. */
3055 static int
3056 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3058 enum tree_code code = TREE_CODE (arg);
3059 enum tree_code_class class = TREE_CODE_CLASS (code);
3061 /* We can handle some of the tcc_expression cases here. */
3062 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3063 class = tcc_unary;
3064 else if (class == tcc_expression
3065 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3066 || code == COMPOUND_EXPR))
3067 class = tcc_binary;
3069 else if (class == tcc_expression && code == SAVE_EXPR
3070 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3072 /* If we've already found a CVAL1 or CVAL2, this expression is
3073 too complex to handle. */
3074 if (*cval1 || *cval2)
3075 return 0;
3077 class = tcc_unary;
3078 *save_p = 1;
3081 switch (class)
3083 case tcc_unary:
3084 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3086 case tcc_binary:
3087 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3088 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3089 cval1, cval2, save_p));
3091 case tcc_constant:
3092 return 1;
3094 case tcc_expression:
3095 if (code == COND_EXPR)
3096 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3097 cval1, cval2, save_p)
3098 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3099 cval1, cval2, save_p)
3100 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3101 cval1, cval2, save_p));
3102 return 0;
3104 case tcc_comparison:
3105 /* First see if we can handle the first operand, then the second. For
3106 the second operand, we know *CVAL1 can't be zero. It must be that
3107 one side of the comparison is each of the values; test for the
3108 case where this isn't true by failing if the two operands
3109 are the same. */
3111 if (operand_equal_p (TREE_OPERAND (arg, 0),
3112 TREE_OPERAND (arg, 1), 0))
3113 return 0;
3115 if (*cval1 == 0)
3116 *cval1 = TREE_OPERAND (arg, 0);
3117 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3118 ;
3119 else if (*cval2 == 0)
3120 *cval2 = TREE_OPERAND (arg, 0);
3121 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3122 ;
3123 else
3124 return 0;
3126 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3127 ;
3128 else if (*cval2 == 0)
3129 *cval2 = TREE_OPERAND (arg, 1);
3130 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3131 ;
3132 else
3133 return 0;
3135 return 1;
3137 default:
3138 return 0;
3142 /* ARG is a tree that is known to contain just arithmetic operations and
3143 comparisons. Evaluate the operations in the tree substituting NEW0 for
3144 any occurrence of OLD0 as an operand of a comparison and likewise for
3145 NEW1 and OLD1. */
3147 static tree
3148 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3150 tree type = TREE_TYPE (arg);
3151 enum tree_code code = TREE_CODE (arg);
3152 enum tree_code_class class = TREE_CODE_CLASS (code);
3154 /* We can handle some of the tcc_expression cases here. */
3155 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3156 class = tcc_unary;
3157 else if (class == tcc_expression
3158 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3159 class = tcc_binary;
3161 switch (class)
3163 case tcc_unary:
3164 return fold_build1 (code, type,
3165 eval_subst (TREE_OPERAND (arg, 0),
3166 old0, new0, old1, new1));
3168 case tcc_binary:
3169 return fold_build2 (code, type,
3170 eval_subst (TREE_OPERAND (arg, 0),
3171 old0, new0, old1, new1),
3172 eval_subst (TREE_OPERAND (arg, 1),
3173 old0, new0, old1, new1));
3175 case tcc_expression:
3176 switch (code)
3178 case SAVE_EXPR:
3179 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3181 case COMPOUND_EXPR:
3182 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3184 case COND_EXPR:
3185 return fold_build3 (code, type,
3186 eval_subst (TREE_OPERAND (arg, 0),
3187 old0, new0, old1, new1),
3188 eval_subst (TREE_OPERAND (arg, 1),
3189 old0, new0, old1, new1),
3190 eval_subst (TREE_OPERAND (arg, 2),
3191 old0, new0, old1, new1));
3192 default:
3193 break;
3195 /* Fall through - ??? */
3197 case tcc_comparison:
3199 tree arg0 = TREE_OPERAND (arg, 0);
3200 tree arg1 = TREE_OPERAND (arg, 1);
3202 /* We need to check both for exact equality and tree equality. The
3203 former will be true if the operand has a side-effect. In that
3204 case, we know the operand occurred exactly once. */
3206 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3207 arg0 = new0;
3208 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3209 arg0 = new1;
3211 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3212 arg1 = new0;
3213 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3214 arg1 = new1;
3216 return fold_build2 (code, type, arg0, arg1);
3219 default:
3220 return arg;
3224 /* Return a tree for the case when the result of an expression is RESULT
3225 converted to TYPE and OMITTED was previously an operand of the expression
3226 but is now not needed (e.g., we folded OMITTED * 0).
3228 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3229 the conversion of RESULT to TYPE. */
3231 tree
3232 omit_one_operand (tree type, tree result, tree omitted)
3234 tree t = fold_convert (type, result);
3236 if (TREE_SIDE_EFFECTS (omitted))
3237 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3239 return non_lvalue (t);
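/* For example, when folding reduces f () * 0 for some call f () with
   side effects, omit_one_operand (type, integer_zero_node, call)
   yields the COMPOUND_EXPR (f (), 0), keeping the call for its side
   effects while producing the constant result.  */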
3242 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3244 static tree
3245 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3247 tree t = fold_convert (type, result);
3249 if (TREE_SIDE_EFFECTS (omitted))
3250 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3252 return pedantic_non_lvalue (t);
3255 /* Return a tree for the case when the result of an expression is RESULT
3256 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3257 of the expression but are now not needed.
3259 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3260 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3261 evaluated before OMITTED2. Otherwise, if neither has side effects,
3262 just do the conversion of RESULT to TYPE. */
3264 tree
3265 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3267 tree t = fold_convert (type, result);
3269 if (TREE_SIDE_EFFECTS (omitted2))
3270 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3271 if (TREE_SIDE_EFFECTS (omitted1))
3272 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3274 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3278 /* Return a simplified tree node for the truth-negation of ARG. This
3279 never alters ARG itself. We assume that ARG is an operation that
3280 returns a truth value (0 or 1).
3282 FIXME: one would think we would fold the result, but it causes
3283 problems with the dominator optimizer. */
3285 tree
3286 fold_truth_not_expr (tree arg)
3288 tree type = TREE_TYPE (arg);
3289 enum tree_code code = TREE_CODE (arg);
3291 /* If this is a comparison, we can simply invert it, except for
3292 floating-point non-equality comparisons, in which case we just
3293 enclose a TRUTH_NOT_EXPR around what we have. */
3295 if (TREE_CODE_CLASS (code) == tcc_comparison)
3297 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3298 if (FLOAT_TYPE_P (op_type)
3299 && flag_trapping_math
3300 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3301 && code != NE_EXPR && code != EQ_EXPR)
3302 return NULL_TREE;
3303 else
3305 code = invert_tree_comparison (code,
3306 HONOR_NANS (TYPE_MODE (op_type)));
3307 if (code == ERROR_MARK)
3308 return NULL_TREE;
3309 else
3310 return build2 (code, type,
3311 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3315 switch (code)
3317 case INTEGER_CST:
3318 return constant_boolean_node (integer_zerop (arg), type);
3320 case TRUTH_AND_EXPR:
3321 return build2 (TRUTH_OR_EXPR, type,
3322 invert_truthvalue (TREE_OPERAND (arg, 0)),
3323 invert_truthvalue (TREE_OPERAND (arg, 1)));
3325 case TRUTH_OR_EXPR:
3326 return build2 (TRUTH_AND_EXPR, type,
3327 invert_truthvalue (TREE_OPERAND (arg, 0)),
3328 invert_truthvalue (TREE_OPERAND (arg, 1)));
3330 case TRUTH_XOR_EXPR:
3331 /* Here we can invert either operand. We invert the first operand
3332 unless the second operand is a TRUTH_NOT_EXPR in which case our
3333 result is the XOR of the first operand with the inside of the
3334 negation of the second operand. */
3336 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3337 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3338 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3339 else
3340 return build2 (TRUTH_XOR_EXPR, type,
3341 invert_truthvalue (TREE_OPERAND (arg, 0)),
3342 TREE_OPERAND (arg, 1));
3344 case TRUTH_ANDIF_EXPR:
3345 return build2 (TRUTH_ORIF_EXPR, type,
3346 invert_truthvalue (TREE_OPERAND (arg, 0)),
3347 invert_truthvalue (TREE_OPERAND (arg, 1)));
3349 case TRUTH_ORIF_EXPR:
3350 return build2 (TRUTH_ANDIF_EXPR, type,
3351 invert_truthvalue (TREE_OPERAND (arg, 0)),
3352 invert_truthvalue (TREE_OPERAND (arg, 1)));
3354 case TRUTH_NOT_EXPR:
3355 return TREE_OPERAND (arg, 0);
3357 case COND_EXPR:
3359 tree arg1 = TREE_OPERAND (arg, 1);
3360 tree arg2 = TREE_OPERAND (arg, 2);
3361 /* A COND_EXPR may have a throw as one operand, which
3362 then has void type. Just leave void operands
3363 as they are. */
3364 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3365 VOID_TYPE_P (TREE_TYPE (arg1))
3366 ? arg1 : invert_truthvalue (arg1),
3367 VOID_TYPE_P (TREE_TYPE (arg2))
3368 ? arg2 : invert_truthvalue (arg2));
3371 case COMPOUND_EXPR:
3372 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3373 invert_truthvalue (TREE_OPERAND (arg, 1)));
3375 case NON_LVALUE_EXPR:
3376 return invert_truthvalue (TREE_OPERAND (arg, 0));
3378 case NOP_EXPR:
3379 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3380 return build1 (TRUTH_NOT_EXPR, type, arg);
3382 case CONVERT_EXPR:
3383 case FLOAT_EXPR:
3384 return build1 (TREE_CODE (arg), type,
3385 invert_truthvalue (TREE_OPERAND (arg, 0)));
3387 case BIT_AND_EXPR:
3388 if (!integer_onep (TREE_OPERAND (arg, 1)))
3389 break;
3390 return build2 (EQ_EXPR, type, arg,
3391 build_int_cst (type, 0));
3393 case SAVE_EXPR:
3394 return build1 (TRUTH_NOT_EXPR, type, arg);
3396 case CLEANUP_POINT_EXPR:
3397 return build1 (CLEANUP_POINT_EXPR, type,
3398 invert_truthvalue (TREE_OPERAND (arg, 0)));
3400 default:
3401 break;
3404 return NULL_TREE;
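/* The TRUTH_AND_EXPR and TRUTH_OR_EXPR cases above are De Morgan's
   laws: !(a && b) becomes !a || !b, and !(a || b) becomes !a && !b,
   with invert_truthvalue applied recursively to each operand.  */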
3407 /* Return a simplified tree node for the truth-negation of ARG. This
3408 never alters ARG itself. We assume that ARG is an operation that
3409 returns a truth value (0 or 1).
3411 FIXME: one would think we would fold the result, but it causes
3412 problems with the dominator optimizer. */
3414 tree
3415 invert_truthvalue (tree arg)
3417 tree tem;
3419 if (TREE_CODE (arg) == ERROR_MARK)
3420 return arg;
3422 tem = fold_truth_not_expr (arg);
3423 if (!tem)
3424 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3426 return tem;
3429 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3430 operands are another bit-wise operation with a common input. If so,
3431 distribute the bit operations to save an operation and possibly two if
3432 constants are involved. For example, convert
3433 (A | B) & (A | C) into A | (B & C)
3434 Further simplification will occur if B and C are constants.
3436 If this optimization cannot be done, 0 will be returned. */
3438 static tree
3439 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3441 tree common;
3442 tree left, right;
3444 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3445 || TREE_CODE (arg0) == code
3446 || (TREE_CODE (arg0) != BIT_AND_EXPR
3447 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3448 return 0;
3450 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3452 common = TREE_OPERAND (arg0, 0);
3453 left = TREE_OPERAND (arg0, 1);
3454 right = TREE_OPERAND (arg1, 1);
3456 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3458 common = TREE_OPERAND (arg0, 0);
3459 left = TREE_OPERAND (arg0, 1);
3460 right = TREE_OPERAND (arg1, 0);
3462 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3464 common = TREE_OPERAND (arg0, 1);
3465 left = TREE_OPERAND (arg0, 0);
3466 right = TREE_OPERAND (arg1, 1);
3468 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3470 common = TREE_OPERAND (arg0, 1);
3471 left = TREE_OPERAND (arg0, 0);
3472 right = TREE_OPERAND (arg1, 0);
3474 else
3475 return 0;
3477 return fold_build2 (TREE_CODE (arg0), type, common,
3478 fold_build2 (code, type, left, right));
3481 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3482 with code CODE. This optimization is unsafe. */
3483 static tree
3484 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3486 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3487 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3489 /* (A / C) +- (B / C) -> (A +- B) / C. */
3490 if (mul0 == mul1
3491 && operand_equal_p (TREE_OPERAND (arg0, 1),
3492 TREE_OPERAND (arg1, 1), 0))
3493 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3494 fold_build2 (code, type,
3495 TREE_OPERAND (arg0, 0),
3496 TREE_OPERAND (arg1, 0)),
3497 TREE_OPERAND (arg0, 1));
3499 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3500 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3501 TREE_OPERAND (arg1, 0), 0)
3502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3503 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3505 REAL_VALUE_TYPE r0, r1;
3506 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3507 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3508 if (!mul0)
3509 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3510 if (!mul1)
3511 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3512 real_arithmetic (&r0, code, &r0, &r1);
3513 return fold_build2 (MULT_EXPR, type,
3514 TREE_OPERAND (arg0, 0),
3515 build_real (type, r0));
3518 return NULL_TREE;
3521 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3522 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3524 static tree
3525 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3526 int unsignedp)
3528 tree result;
3530 if (bitpos == 0)
3532 tree size = TYPE_SIZE (TREE_TYPE (inner));
3533 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3534 || POINTER_TYPE_P (TREE_TYPE (inner)))
3535 && host_integerp (size, 0)
3536 && tree_low_cst (size, 0) == bitsize)
3537 return fold_convert (type, inner);
3540 result = build3 (BIT_FIELD_REF, type, inner,
3541 size_int (bitsize), bitsize_int (bitpos));
3543 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3545 return result;
3548 /* Optimize a bit-field compare.
3550 There are two cases: First is a compare against a constant and the
3551 second is a comparison of two items where the fields are at the same
3552 bit position relative to the start of a chunk (byte, halfword, word)
3553 large enough to contain it. In these cases we can avoid the shift
3554 implicit in bitfield extractions.
3556 For constants, we emit a compare of the shifted constant with the
3557 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3558 compared. For two fields at the same position, we do the ANDs with the
3559 similar mask and compare the result of the ANDs.
3561 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3562 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3563 are the left and right operands of the comparison, respectively.
3565 If the optimization described above can be done, we return the resulting
3566 tree. Otherwise we return zero. */
3568 static tree
3569 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3570 tree lhs, tree rhs)
3572 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3573 tree type = TREE_TYPE (lhs);
3574 tree signed_type, unsigned_type;
3575 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3576 enum machine_mode lmode, rmode, nmode;
3577 int lunsignedp, runsignedp;
3578 int lvolatilep = 0, rvolatilep = 0;
3579 tree linner, rinner = NULL_TREE;
3580 tree mask;
3581 tree offset;
3583 /* Get all the information about the extractions being done. If the bit size
3584 is the same as the size of the underlying object, we aren't doing an
3585 extraction at all and so can do nothing. We also don't want to
3586 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3587 then will no longer be able to replace it. */
3588 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3589 &lunsignedp, &lvolatilep, false);
3590 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3591 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3592 return 0;
3594 if (!const_p)
3596 /* If this is not a constant, we can only do something if bit positions,
3597 sizes, and signedness are the same. */
3598 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3599 &runsignedp, &rvolatilep, false);
3601 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3602 || lunsignedp != runsignedp || offset != 0
3603 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3604 return 0;
3607 /* See if we can find a mode to refer to this field. We should be able to,
3608 but fail if we can't. */
3609 nmode = get_best_mode (lbitsize, lbitpos,
3610 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3611 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3612 TYPE_ALIGN (TREE_TYPE (rinner))),
3613 word_mode, lvolatilep || rvolatilep);
3614 if (nmode == VOIDmode)
3615 return 0;
3617 /* Set signed and unsigned types of the precision of this mode for the
3618 shifts below. */
3619 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3620 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3622 /* Compute the bit position and size for the new reference and our offset
3623 within it. If the new reference is the same size as the original, we
3624 won't optimize anything, so return zero. */
3625 nbitsize = GET_MODE_BITSIZE (nmode);
3626 nbitpos = lbitpos & ~ (nbitsize - 1);
3627 lbitpos -= nbitpos;
3628 if (nbitsize == lbitsize)
3629 return 0;
3631 if (BYTES_BIG_ENDIAN)
3632 lbitpos = nbitsize - lbitsize - lbitpos;
3634 /* Make the mask to be used against the extracted field. */
3635 mask = build_int_cst_type (unsigned_type, -1);
3636 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3637 mask = const_binop (RSHIFT_EXPR, mask,
3638 size_int (nbitsize - lbitsize - lbitpos), 0);
3640 if (! const_p)
3641 /* If not comparing with constant, just rework the comparison
3642 and return. */
3643 return fold_build2 (code, compare_type,
3644 fold_build2 (BIT_AND_EXPR, unsigned_type,
3645 make_bit_field_ref (linner,
3646 unsigned_type,
3647 nbitsize, nbitpos,
3648 1),
3649 mask),
3650 fold_build2 (BIT_AND_EXPR, unsigned_type,
3651 make_bit_field_ref (rinner,
3652 unsigned_type,
3653 nbitsize, nbitpos,
3654 1),
3655 mask));
3657 /* Otherwise, we are handling the constant case. See if the constant is too
3658 big for the field. Warn and return a tree for 0 (false) if so. We do
3659 this not only for its own sake, but to avoid having to test for this
3660 error case below. If we didn't, we might generate wrong code.
3662 For unsigned fields, the constant shifted right by the field length should
3663 be all zero. For signed fields, the high-order bits should agree with
3664 the sign bit. */
3666 if (lunsignedp)
3668 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3669 fold_convert (unsigned_type, rhs),
3670 size_int (lbitsize), 0)))
3672 warning (0, "comparison is always %d due to width of bit-field",
3673 code == NE_EXPR);
3674 return constant_boolean_node (code == NE_EXPR, compare_type);
3677 else
3679 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3680 size_int (lbitsize - 1), 0);
3681 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3683 warning (0, "comparison is always %d due to width of bit-field",
3684 code == NE_EXPR);
3685 return constant_boolean_node (code == NE_EXPR, compare_type);
3689 /* Single-bit compares should always be against zero. */
3690 if (lbitsize == 1 && ! integer_zerop (rhs))
3692 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3693 rhs = build_int_cst (type, 0);
3696 /* Make a new bitfield reference, shift the constant over the
3697 appropriate number of bits and mask it with the computed mask
3698 (in case this was a signed field). If we changed it, make a new one. */
3699 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3700 if (lvolatilep)
3702 TREE_SIDE_EFFECTS (lhs) = 1;
3703 TREE_THIS_VOLATILE (lhs) = 1;
3706 rhs = const_binop (BIT_AND_EXPR,
3707 const_binop (LSHIFT_EXPR,
3708 fold_convert (unsigned_type, rhs),
3709 size_int (lbitpos), 0),
3710 mask, 0);
3712 return build2 (code, compare_type,
3713 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3714 rhs);
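/* As a sketch of the constant case above: for a declaration like
     struct s { unsigned f : 3; } x;
   the test x.f == 5 becomes, roughly,
     (BIT_FIELD_REF (x, word) & mask) == ((5 << lbitpos) & mask)
   where the mask has three one bits at the field's position, so no
   shift of the extracted word is needed at run time.  */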
3717 /* Subroutine for fold_truthop: decode a field reference.
3719 If EXP is a comparison reference, we return the innermost reference.
3721 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3722 set to the starting bit number.
3724 If the innermost field can be completely contained in a mode-sized
3725 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3727 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3728 otherwise it is not changed.
3730 *PUNSIGNEDP is set to the signedness of the field.
3732 *PMASK is set to the mask used. This is either contained in a
3733 BIT_AND_EXPR or derived from the width of the field.
3735 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3737 Return 0 if this is not a component reference or is one that we can't
3738 do anything with. */
3740 static tree
3741 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3742 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3743 int *punsignedp, int *pvolatilep,
3744 tree *pmask, tree *pand_mask)
3746 tree outer_type = 0;
3747 tree and_mask = 0;
3748 tree mask, inner, offset;
3749 tree unsigned_type;
3750 unsigned int precision;
3752 /* All the optimizations using this function assume integer fields.
3753 There are problems with FP fields since the type_for_size call
3754 below can fail for, e.g., XFmode. */
3755 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3756 return 0;
3758 /* We are interested in the bare arrangement of bits, so strip everything
3759 that doesn't affect the machine mode. However, record the type of the
3760 outermost expression if it may matter below. */
3761 if (TREE_CODE (exp) == NOP_EXPR
3762 || TREE_CODE (exp) == CONVERT_EXPR
3763 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3764 outer_type = TREE_TYPE (exp);
3765 STRIP_NOPS (exp);
3767 if (TREE_CODE (exp) == BIT_AND_EXPR)
3769 and_mask = TREE_OPERAND (exp, 1);
3770 exp = TREE_OPERAND (exp, 0);
3771 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3772 if (TREE_CODE (and_mask) != INTEGER_CST)
3773 return 0;
3776 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3777 punsignedp, pvolatilep, false);
3778 if ((inner == exp && and_mask == 0)
3779 || *pbitsize < 0 || offset != 0
3780 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3781 return 0;
3783 /* If the number of bits in the reference is the same as the bitsize of
3784 the outer type, then the outer type gives the signedness. Otherwise
3785 (in case of a small bitfield) the signedness is unchanged. */
3786 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3787 *punsignedp = TYPE_UNSIGNED (outer_type);
3789 /* Compute the mask to access the bitfield. */
3790 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3791 precision = TYPE_PRECISION (unsigned_type);
3793 mask = build_int_cst_type (unsigned_type, -1);
3795 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3796 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3798 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3799 if (and_mask != 0)
3800 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3801 fold_convert (unsigned_type, and_mask), mask);
3803 *pmask = mask;
3804 *pand_mask = and_mask;
3805 return inner;
3808 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3809 bit positions. */
3811 static int
3812 all_ones_mask_p (tree mask, int size)
3814 tree type = TREE_TYPE (mask);
3815 unsigned int precision = TYPE_PRECISION (type);
3816 tree tmask;
3818 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3820 return
3821 tree_int_cst_equal (mask,
3822 const_binop (RSHIFT_EXPR,
3823 const_binop (LSHIFT_EXPR, tmask,
3824 size_int (precision - size), 0),
3826 size_int (precision - size), 0));
3829 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3830 represents the sign bit of EXP's type. If EXP represents a sign
3831 or zero extension, also test VAL against the unextended type.
3832 The return value is the (sub)expression whose sign bit is VAL,
3833 or NULL_TREE otherwise. */
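/* For example, assuming 32-bit int and 8-bit signed char: for an int EXP,
   only VAL == 0x80000000 answers; for EXP == (int) c with c a signed
   char, VAL == 0x80 matches the sign bit of the unextended operand, and
   c itself is returned.  */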
3835 static tree
3836 sign_bit_p (tree exp, tree val)
3838 unsigned HOST_WIDE_INT mask_lo, lo;
3839 HOST_WIDE_INT mask_hi, hi;
3840 int width;
3841 tree t;
3843 /* Tree EXP must have an integral type. */
3844 t = TREE_TYPE (exp);
3845 if (! INTEGRAL_TYPE_P (t))
3846 return NULL_TREE;
3848 /* Tree VAL must be an integer constant. */
3849 if (TREE_CODE (val) != INTEGER_CST
3850 || TREE_OVERFLOW (val))
3851 return NULL_TREE;
3853 width = TYPE_PRECISION (t);
3854 if (width > HOST_BITS_PER_WIDE_INT)
3856 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3857 lo = 0;
3859 mask_hi = ((unsigned HOST_WIDE_INT) -1
3860 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3861 mask_lo = -1;
3863 else
3865 hi = 0;
3866 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3868 mask_hi = 0;
3869 mask_lo = ((unsigned HOST_WIDE_INT) -1
3870 >> (HOST_BITS_PER_WIDE_INT - width));
3873 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3874 treat VAL as if it were unsigned. */
3875 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3876 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3877 return exp;
3879 /* Handle extension from a narrower type. */
3880 if (TREE_CODE (exp) == NOP_EXPR
3881 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3882 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3884 return NULL_TREE;
3887 /* Subroutine for fold_truthop: determine if an operand is simple enough
3888 to be evaluated unconditionally. */
3890 static int
3891 simple_operand_p (tree exp)
3893 /* Strip any conversions that don't change the machine mode. */
3894 STRIP_NOPS (exp);
3896 return (CONSTANT_CLASS_P (exp)
3897 || TREE_CODE (exp) == SSA_NAME
3898 || (DECL_P (exp)
3899 && ! TREE_ADDRESSABLE (exp)
3900 && ! TREE_THIS_VOLATILE (exp)
3901 && ! DECL_NONLOCAL (exp)
3902 /* Don't regard global variables as simple. They may be
3903 allocated in ways unknown to the compiler (shared memory,
3904 #pragma weak, etc.). */
3905 && ! TREE_PUBLIC (exp)
3906 && ! DECL_EXTERNAL (exp)
3907 /* Loading a static variable is unduly expensive, but global
3908 registers aren't expensive. */
3909 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3912 /* The following functions are subroutines to fold_range_test and allow it to
3913 try to change a logical combination of comparisons into a range test.
3915 For example, both
3916 X == 2 || X == 3 || X == 4 || X == 5
3917 and
3918 X >= 2 && X <= 5
3919 are converted to
3920 (unsigned) (X - 2) <= 3
3922 We describe each set of comparisons as being either inside or outside
3923 a range, using a variable named like IN_P, and then describe the
3924 range with a lower and upper bound. If one of the bounds is omitted,
3925 it represents either the highest or lowest value of the type.
3927 In the comments below, we represent a range by two numbers in brackets
3928 preceded by a "+" to designate being inside that range, or a "-" to
3929 designate being outside that range, so the condition can be inverted by
3930 flipping the prefix. An omitted bound is represented by a "-". For
3931 example, "- [-, 10]" means being outside the range starting at the lowest
3932 possible value and ending at 10, in other words, being greater than 10.
3933 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3934 always false.
3936 We set up things so that the missing bounds are handled in a consistent
3937 manner so neither a missing bound nor "true" and "false" need to be
3938 handled using a special case. */
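/* To illustrate the notation (a sketch, for a 32-bit int X):

     X >= 2 && X <= 5   is   + [2, 5]
     X < 2 || X > 5     is   - [2, 5]
     X > 10             is   - [-, 10]
     X != 7             is   - [7, 7]

   and the merged range + [2, 5] is ultimately emitted as
   (unsigned) (X - 2) <= 3.  */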
3940 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3941 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3942 and UPPER1_P are nonzero if the respective argument is an upper bound
3943 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3944 must be specified for a comparison. ARG1 will be converted to ARG0's
3945 type if both are specified. */
3947 static tree
3948 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3949 tree arg1, int upper1_p)
3951 tree tem;
3952 int result;
3953 int sgn0, sgn1;
3955 /* If neither arg represents infinity, do the normal operation.
3956 Else, if not a comparison, return infinity. Else handle the special
3957 comparison rules. Note that most of the cases below won't occur, but
3958 are handled for consistency. */
3960 if (arg0 != 0 && arg1 != 0)
3962 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3963 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3964 STRIP_NOPS (tem);
3965 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3968 if (TREE_CODE_CLASS (code) != tcc_comparison)
3969 return 0;
3971 /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 for an
3972 omitted upper bound, and 0 if the argument is present. In real
3973 mathematics, we cannot assume open-ended ranges compare equal. But this
3974 is computer arithmetic, where numbers are finite. We can therefore
3975 substitute a value Z, greater than any representable number, for any
3976 missing bound; this permits us to treat unbounded ranges as equal. */
3977 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3978 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3979 switch (code)
3981 case EQ_EXPR:
3982 result = sgn0 == sgn1;
3983 break;
3984 case NE_EXPR:
3985 result = sgn0 != sgn1;
3986 break;
3987 case LT_EXPR:
3988 result = sgn0 < sgn1;
3989 break;
3990 case LE_EXPR:
3991 result = sgn0 <= sgn1;
3992 break;
3993 case GT_EXPR:
3994 result = sgn0 > sgn1;
3995 break;
3996 case GE_EXPR:
3997 result = sgn0 >= sgn1;
3998 break;
3999 default:
4000 gcc_unreachable ();
4003 return constant_boolean_node (result, type);
4006 /* Given EXP, a logical expression, set the range it is testing into
4007 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4008 actually being tested. *PLOW and *PHIGH will be made of the same
4009 type as the returned expression. If EXP is not a comparison, we
4010 will most likely not be returning a useful value and range. Set
4011 *STRICT_OVERFLOW_P to true if the return value is only valid
4012 because signed overflow is undefined; otherwise, do not change
4013 *STRICT_OVERFLOW_P. */
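/* As a sketch, for EXP == x + 2 > 3 with x a signed int (assuming the
   default, undefined signed-overflow rules): the comparison is first
   recorded as x + 2 in - [-, 3], then the constant is moved across the
   PLUS_EXPR, giving the range - [-, 1] (i.e. x > 1) with x as the
   returned expression; *STRICT_OVERFLOW_P is set, since moving the
   constant relies on signed overflow being undefined.  */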
4015 static tree
4016 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4017 bool *strict_overflow_p)
4019 enum tree_code code;
4020 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4021 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4022 int in_p, n_in_p;
4023 tree low, high, n_low, n_high;
4025 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4026 and see if we can refine the range. Some of the cases below may not
4027 happen, but it doesn't seem worth worrying about this. We "continue"
4028 the outer loop when we've changed something; otherwise we "break"
4029 the switch, which will "break" the while. */
4031 in_p = 0;
4032 low = high = build_int_cst (TREE_TYPE (exp), 0);
4034 while (1)
4036 code = TREE_CODE (exp);
4037 exp_type = TREE_TYPE (exp);
4039 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4041 if (TREE_OPERAND_LENGTH (exp) > 0)
4042 arg0 = TREE_OPERAND (exp, 0);
4043 if (TREE_CODE_CLASS (code) == tcc_comparison
4044 || TREE_CODE_CLASS (code) == tcc_unary
4045 || TREE_CODE_CLASS (code) == tcc_binary)
4046 arg0_type = TREE_TYPE (arg0);
4047 if (TREE_CODE_CLASS (code) == tcc_binary
4048 || TREE_CODE_CLASS (code) == tcc_comparison
4049 || (TREE_CODE_CLASS (code) == tcc_expression
4050 && TREE_OPERAND_LENGTH (exp) > 1))
4051 arg1 = TREE_OPERAND (exp, 1);
4054 switch (code)
4056 case TRUTH_NOT_EXPR:
4057 in_p = ! in_p, exp = arg0;
4058 continue;
4060 case EQ_EXPR: case NE_EXPR:
4061 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4062 /* We can only do something if the range is testing for zero
4063 and if the second operand is an integer constant. Note that
4064 saying something is "in" the range we make is done by
4065 complementing IN_P, since it is initially set for the case of
4066 being not equal to zero; "out" is leaving it alone.
4067 if (low == 0 || high == 0
4068 || ! integer_zerop (low) || ! integer_zerop (high)
4069 || TREE_CODE (arg1) != INTEGER_CST)
4070 break;
4072 switch (code)
4074 case NE_EXPR: /* - [c, c] */
4075 low = high = arg1;
4076 break;
4077 case EQ_EXPR: /* + [c, c] */
4078 in_p = ! in_p, low = high = arg1;
4079 break;
4080 case GT_EXPR: /* - [-, c] */
4081 low = 0, high = arg1;
4082 break;
4083 case GE_EXPR: /* + [c, -] */
4084 in_p = ! in_p, low = arg1, high = 0;
4085 break;
4086 case LT_EXPR: /* - [c, -] */
4087 low = arg1, high = 0;
4088 break;
4089 case LE_EXPR: /* + [-, c] */
4090 in_p = ! in_p, low = 0, high = arg1;
4091 break;
4092 default:
4093 gcc_unreachable ();
4096 /* If this is an unsigned comparison, we also know that EXP is
4097 greater than or equal to zero. We base the range tests we make
4098 on that fact, so we record it here in order to parse existing
4099 range tests. We test arg0_type since the return type of,
4100 e.g., EQ_EXPR, is often boolean.
4101 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4103 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4104 in_p, low, high, 1,
4105 build_int_cst (arg0_type, 0),
4106 NULL_TREE))
4107 break;
4109 in_p = n_in_p, low = n_low, high = n_high;
4111 /* If the high bound is missing, but we have a nonzero low
4112 bound, reverse the range so it goes from zero to the low bound
4113 minus 1. */
4114 if (high == 0 && low && ! integer_zerop (low))
4116 in_p = ! in_p;
4117 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4118 integer_one_node, 0);
4119 low = build_int_cst (arg0_type, 0);
4123 exp = arg0;
4124 continue;
4126 case NEGATE_EXPR:
4127 /* (-x) IN [a,b] -> x in [-b, -a] */
4128 n_low = range_binop (MINUS_EXPR, exp_type,
4129 build_int_cst (exp_type, 0),
4130 0, high, 1);
4131 n_high = range_binop (MINUS_EXPR, exp_type,
4132 build_int_cst (exp_type, 0),
4133 0, low, 0);
4134 low = n_low, high = n_high;
4135 exp = arg0;
4136 continue;
4138 case BIT_NOT_EXPR:
4139 /* ~ X -> -X - 1 */
4140 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4141 build_int_cst (exp_type, 1));
4142 continue;
4144 case PLUS_EXPR: case MINUS_EXPR:
4145 if (TREE_CODE (arg1) != INTEGER_CST)
4146 break;
4148 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4149 move a constant to the other side. */
4150 if (!TYPE_UNSIGNED (arg0_type)
4151 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4152 break;
4154 /* If EXP is signed, any overflow in the computation is undefined,
4155 so we don't worry about it so long as our computations on
4156 the bounds don't overflow. For unsigned, overflow is defined
4157 and this is exactly the right thing. */
4158 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4159 arg0_type, low, 0, arg1, 0);
4160 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4161 arg0_type, high, 1, arg1, 0);
4162 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4163 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4164 break;
4166 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4167 *strict_overflow_p = true;
4169 /* Check for an unsigned range which has wrapped around the maximum
4170 value thus making n_high < n_low, and normalize it. */
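/* E.g., assuming an 8-bit unsigned char: the wrapped range + [250, 5]
   becomes - [6, 249], which describes the same set of values.  */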
4171 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4173 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4174 integer_one_node, 0);
4175 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4176 integer_one_node, 0);
4178 /* If the range is of the form +/- [ x+1, x ], we won't
4179 be able to normalize it. But then, it represents the
4180 whole range or the empty set, so make it
4181 +/- [ -, - ]. */
4182 if (tree_int_cst_equal (n_low, low)
4183 && tree_int_cst_equal (n_high, high))
4184 low = high = 0;
4185 else
4186 in_p = ! in_p;
4188 else
4189 low = n_low, high = n_high;
4191 exp = arg0;
4192 continue;
4194 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4195 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4196 break;
4198 if (! INTEGRAL_TYPE_P (arg0_type)
4199 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4200 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4201 break;
4203 n_low = low, n_high = high;
4205 if (n_low != 0)
4206 n_low = fold_convert (arg0_type, n_low);
4208 if (n_high != 0)
4209 n_high = fold_convert (arg0_type, n_high);
4212 /* If we're converting arg0 from an unsigned type to exp's
4213 signed type, we will be doing the comparison as unsigned.
4214 The tests above have already verified that LOW and HIGH
4215 are both positive.
4217 So we have to ensure that we will handle large unsigned
4218 values the same way that the current signed bounds treat
4219 negative values. */
4221 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4223 tree high_positive;
4224 tree equiv_type = lang_hooks.types.type_for_mode
4225 (TYPE_MODE (arg0_type), 1);
4227 /* A range without an upper bound is, naturally, unbounded.
4228 Since convert would have cropped a very large value, use
4229 the max value for the destination type. */
4230 high_positive
4231 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4232 : TYPE_MAX_VALUE (arg0_type);
4234 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4235 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4236 fold_convert (arg0_type,
4237 high_positive),
4238 build_int_cst (arg0_type, 1));
4240 /* If the low bound is specified, "and" the range with the
4241 range for which the original unsigned value will be
4242 positive. */
4243 if (low != 0)
4245 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4246 1, n_low, n_high, 1,
4247 fold_convert (arg0_type,
4248 integer_zero_node),
4249 high_positive))
4250 break;
4252 in_p = (n_in_p == in_p);
4254 else
4256 /* Otherwise, "or" the range with the range of the input
4257 that will be interpreted as negative. */
4258 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4259 0, n_low, n_high, 1,
4260 fold_convert (arg0_type,
4261 integer_zero_node),
4262 high_positive))
4263 break;
4265 in_p = (in_p != n_in_p);
4269 exp = arg0;
4270 low = n_low, high = n_high;
4271 continue;
4273 default:
4274 break;
4277 break;
4280 /* If EXP is a constant, we can evaluate whether this is true or false. */
4281 if (TREE_CODE (exp) == INTEGER_CST)
4283 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4284 exp, 0, low, 0))
4285 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4286 exp, 1, high, 1)));
4287 low = high = 0;
4288 exp = 0;
4291 *pin_p = in_p, *plow = low, *phigh = high;
4292 return exp;
4295 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4296 type, TYPE, return an expression to test if EXP is in (or out of, depending
4297 on IN_P) the range. Return 0 if the test couldn't be created. */
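/* For example (assuming a 32-bit int EXP), the range + [2, 5] is built
   as (unsigned) EXP - 2 <= 3: the subtraction is forced into the
   corresponding unsigned type at the bottom of this function so that it
   wraps instead of overflowing.  */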
4299 static tree
4300 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4302 tree etype = TREE_TYPE (exp);
4303 tree value;
4305 #ifdef HAVE_canonicalize_funcptr_for_compare
4306 /* Disable this optimization for function pointer expressions
4307 on targets that require function pointer canonicalization. */
4308 if (HAVE_canonicalize_funcptr_for_compare
4309 && TREE_CODE (etype) == POINTER_TYPE
4310 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4311 return NULL_TREE;
4312 #endif
4314 if (! in_p)
4316 value = build_range_check (type, exp, 1, low, high);
4317 if (value != 0)
4318 return invert_truthvalue (value);
4320 return 0;
4323 if (low == 0 && high == 0)
4324 return build_int_cst (type, 1);
4326 if (low == 0)
4327 return fold_build2 (LE_EXPR, type, exp,
4328 fold_convert (etype, high));
4330 if (high == 0)
4331 return fold_build2 (GE_EXPR, type, exp,
4332 fold_convert (etype, low));
4334 if (operand_equal_p (low, high, 0))
4335 return fold_build2 (EQ_EXPR, type, exp,
4336 fold_convert (etype, low));
4338 if (integer_zerop (low))
4340 if (! TYPE_UNSIGNED (etype))
4342 etype = lang_hooks.types.unsigned_type (etype);
4343 high = fold_convert (etype, high);
4344 exp = fold_convert (etype, exp);
4346 return build_range_check (type, exp, 1, 0, high);
4349 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4350 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4352 unsigned HOST_WIDE_INT lo;
4353 HOST_WIDE_INT hi;
4354 int prec;
4356 prec = TYPE_PRECISION (etype);
4357 if (prec <= HOST_BITS_PER_WIDE_INT)
4359 hi = 0;
4360 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4362 else
4364 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4365 lo = (unsigned HOST_WIDE_INT) -1;
4368 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4370 if (TYPE_UNSIGNED (etype))
4372 etype = lang_hooks.types.signed_type (etype);
4373 exp = fold_convert (etype, exp);
4375 return fold_build2 (GT_EXPR, type, exp,
4376 build_int_cst (etype, 0));
4380 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4381 This requires wrap-around arithmetic for the type of the expression. */
4382 switch (TREE_CODE (etype))
4384 case INTEGER_TYPE:
4385 /* There is no requirement that LOW be within the range of ETYPE
4386 if the latter is a subtype. It must, however, be within the base
4387 type of ETYPE. So be sure we do the subtraction in that type. */
4388 if (TREE_TYPE (etype))
4389 etype = TREE_TYPE (etype);
4390 break;
4392 case ENUMERAL_TYPE:
4393 case BOOLEAN_TYPE:
4394 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4395 TYPE_UNSIGNED (etype));
4396 break;
4398 default:
4399 break;
4402 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4403 if (TREE_CODE (etype) == INTEGER_TYPE
4404 && !TYPE_OVERFLOW_WRAPS (etype))
4406 tree utype, minv, maxv;
4408 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4409 for the type in question, as we rely on this here. */
4410 utype = lang_hooks.types.unsigned_type (etype);
4411 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4412 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4413 integer_one_node, 1);
4414 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4416 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4417 minv, 1, maxv, 1)))
4418 etype = utype;
4419 else
4420 return 0;
4423 high = fold_convert (etype, high);
4424 low = fold_convert (etype, low);
4425 exp = fold_convert (etype, exp);
4427 value = const_binop (MINUS_EXPR, high, low, 0);
4429 if (value != 0 && !TREE_OVERFLOW (value))
4430 return build_range_check (type,
4431 fold_build2 (MINUS_EXPR, etype, exp, low),
4432 1, build_int_cst (etype, 0), value);
4434 return 0;
4437 /* Return the predecessor of VAL in its type, handling the infinite case. */
4439 static tree
4440 range_predecessor (tree val)
4442 tree type = TREE_TYPE (val);
4444 if (INTEGRAL_TYPE_P (type)
4445 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4446 return 0;
4447 else
4448 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4451 /* Return the successor of VAL in its type, handling the infinite case. */
4453 static tree
4454 range_successor (tree val)
4456 tree type = TREE_TYPE (val);
4458 if (INTEGRAL_TYPE_P (type)
4459 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4460 return 0;
4461 else
4462 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4465 /* Given two ranges, see if we can merge them into one. Return 1 if we
4466 can, 0 if we can't. Set the output range into the specified parameters. */
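/* For example: merging + [2, 5] with + [4, 9] gives + [4, 5]; merging
   + [2, 5] with - [4, 9] gives + [2, 3]; and merging - [-, 3] with
   - [4, -] gives - [-, -], a range that is always false.  */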
4468 static int
4469 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4470 tree high0, int in1_p, tree low1, tree high1)
4472 int no_overlap;
4473 int subset;
4474 int temp;
4475 tree tem;
4476 int in_p;
4477 tree low, high;
4478 int lowequal = ((low0 == 0 && low1 == 0)
4479 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4480 low0, 0, low1, 0)));
4481 int highequal = ((high0 == 0 && high1 == 0)
4482 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4483 high0, 1, high1, 1)));
4485 /* Make range 0 be the range that starts first, or ends last if they
4486 start at the same value. Swap them if that is not the case. */
4487 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4488 low0, 0, low1, 0))
4489 || (lowequal
4490 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4491 high1, 1, high0, 1))))
4493 temp = in0_p, in0_p = in1_p, in1_p = temp;
4494 tem = low0, low0 = low1, low1 = tem;
4495 tem = high0, high0 = high1, high1 = tem;
4498 /* Now flag two cases, whether the ranges are disjoint or whether the
4499 second range is totally subsumed in the first. Note that the tests
4500 below are simplified by the ones above. */
4501 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4502 high0, 1, low1, 0));
4503 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4504 high1, 1, high0, 1));
4506 /* We now have four cases, depending on whether we are including or
4507 excluding the two ranges. */
4508 if (in0_p && in1_p)
4510 /* If they don't overlap, the result is false. If the second range
4511 is a subset it is the result. Otherwise, the range is from the start
4512 of the second to the end of the first. */
4513 if (no_overlap)
4514 in_p = 0, low = high = 0;
4515 else if (subset)
4516 in_p = 1, low = low1, high = high1;
4517 else
4518 in_p = 1, low = low1, high = high0;
4521 else if (in0_p && ! in1_p)
4523 /* If they don't overlap, the result is the first range. If they are
4524 equal, the result is false. If the second range is a subset of the
4525 first, and the ranges begin at the same place, we go from just after
4526 the end of the second range to the end of the first. If the second
4527 range is not a subset of the first, or if it is a subset and both
4528 ranges end at the same place, the range starts at the start of the
4529 first range and ends just before the second range.
4530 Otherwise, we can't describe this as a single range. */
4531 if (no_overlap)
4532 in_p = 1, low = low0, high = high0;
4533 else if (lowequal && highequal)
4534 in_p = 0, low = high = 0;
4535 else if (subset && lowequal)
4537 low = range_successor (high1);
4538 high = high0;
4539 in_p = (low != 0);
4541 else if (! subset || highequal)
4543 low = low0;
4544 high = range_predecessor (low1);
4545 in_p = (high != 0);
4547 else
4548 return 0;
4551 else if (! in0_p && in1_p)
4553 /* If they don't overlap, the result is the second range. If the second
4554 is a subset of the first, the result is false. Otherwise,
4555 the range starts just after the first range and ends at the
4556 end of the second. */
4557 if (no_overlap)
4558 in_p = 1, low = low1, high = high1;
4559 else if (subset || highequal)
4560 in_p = 0, low = high = 0;
4561 else
4563 low = range_successor (high0);
4564 high = high1;
4565 in_p = (low != 0);
4569 else
4571 /* The case where we are excluding both ranges. Here the complex case
4572 is if they don't overlap. In that case, the only time we have a
4573 range is if they are adjacent. If the second is a subset of the
4574 first, the result is the first. Otherwise, the range to exclude
4575 starts at the beginning of the first range and ends at the end of the
4576 second. */
4577 if (no_overlap)
4579 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4580 range_successor (high0),
4581 1, low1, 0)))
4582 in_p = 0, low = low0, high = high1;
4583 else
4585 /* Canonicalize - [min, x] into - [-, x]. */
4586 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4587 switch (TREE_CODE (TREE_TYPE (low0)))
4589 case ENUMERAL_TYPE:
4590 if (TYPE_PRECISION (TREE_TYPE (low0))
4591 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4592 break;
4593 /* FALLTHROUGH */
4594 case INTEGER_TYPE:
4595 if (tree_int_cst_equal (low0,
4596 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4597 low0 = 0;
4598 break;
4599 case POINTER_TYPE:
4600 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4601 && integer_zerop (low0))
4602 low0 = 0;
4603 break;
4604 default:
4605 break;
4608 /* Canonicalize - [x, max] into - [x, -]. */
4609 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4610 switch (TREE_CODE (TREE_TYPE (high1)))
4612 case ENUMERAL_TYPE:
4613 if (TYPE_PRECISION (TREE_TYPE (high1))
4614 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4615 break;
4616 /* FALLTHROUGH */
4617 case INTEGER_TYPE:
4618 if (tree_int_cst_equal (high1,
4619 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4620 high1 = 0;
4621 break;
4622 case POINTER_TYPE:
4623 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4624 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4625 high1, 1,
4626 integer_one_node, 1)))
4627 high1 = 0;
4628 break;
4629 default:
4630 break;
4633 /* The ranges might also be adjacent between the maximum and
4634 minimum values of the given type. For
4635 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4636 return + [x + 1, y - 1]. */
4637 if (low0 == 0 && high1 == 0)
4639 low = range_successor (high0);
4640 high = range_predecessor (low1);
4641 if (low == 0 || high == 0)
4642 return 0;
4644 in_p = 1;
4646 else
4647 return 0;
4650 else if (subset)
4651 in_p = 0, low = low0, high = high0;
4652 else
4653 in_p = 0, low = low0, high = high1;
4656 *pin_p = in_p, *plow = low, *phigh = high;
4657 return 1;
4661 /* Subroutine of fold, looking inside expressions of the form
4662 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4663 of the COND_EXPR. This function is also used to optimize
4664 A op B ? C : A by reversing the comparison first.
4666 Return a folded expression whose code is not a COND_EXPR
4667 anymore, or NULL_TREE if no folding opportunity is found. */
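/* For example, x < y ? x : y becomes MIN_EXPR <y, x> and
   x >= 0 ? x : -x becomes ABS_EXPR <x>, subject to the signed-zero and
   NaN caveats spelled out below.  */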
4669 static tree
4670 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4672 enum tree_code comp_code = TREE_CODE (arg0);
4673 tree arg00 = TREE_OPERAND (arg0, 0);
4674 tree arg01 = TREE_OPERAND (arg0, 1);
4675 tree arg1_type = TREE_TYPE (arg1);
4676 tree tem;
4678 STRIP_NOPS (arg1);
4679 STRIP_NOPS (arg2);
4681 /* If we have A op 0 ? A : -A, consider applying the following
4682 transformations:
4684 A == 0? A : -A same as -A
4685 A != 0? A : -A same as A
4686 A >= 0? A : -A same as abs (A)
4687 A > 0? A : -A same as abs (A)
4688 A <= 0? A : -A same as -abs (A)
4689 A < 0? A : -A same as -abs (A)
4691 None of these transformations work for modes with signed
4692 zeros. If A is +/-0, the first two transformations will
4693 change the sign of the result (from +0 to -0, or vice
4694 versa). The last four will fix the sign of the result,
4695 even though the original expressions could be positive or
4696 negative, depending on the sign of A.
4698 Note that all these transformations are correct if A is
4699 NaN, since the two alternatives (A and -A) are also NaNs. */
4700 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4701 ? real_zerop (arg01)
4702 : integer_zerop (arg01))
4703 && ((TREE_CODE (arg2) == NEGATE_EXPR
4704 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4705 /* In the case that A is of the form X-Y, '-A' (arg2) may
4706 have already been folded to Y-X; check for that. */
4707 || (TREE_CODE (arg1) == MINUS_EXPR
4708 && TREE_CODE (arg2) == MINUS_EXPR
4709 && operand_equal_p (TREE_OPERAND (arg1, 0),
4710 TREE_OPERAND (arg2, 1), 0)
4711 && operand_equal_p (TREE_OPERAND (arg1, 1),
4712 TREE_OPERAND (arg2, 0), 0))))
4713 switch (comp_code)
4715 case EQ_EXPR:
4716 case UNEQ_EXPR:
4717 tem = fold_convert (arg1_type, arg1);
4718 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4719 case NE_EXPR:
4720 case LTGT_EXPR:
4721 return pedantic_non_lvalue (fold_convert (type, arg1));
4722 case UNGE_EXPR:
4723 case UNGT_EXPR:
4724 if (flag_trapping_math)
4725 break;
4726 /* Fall through. */
4727 case GE_EXPR:
4728 case GT_EXPR:
4729 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4730 arg1 = fold_convert (lang_hooks.types.signed_type
4731 (TREE_TYPE (arg1)), arg1);
4732 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4733 return pedantic_non_lvalue (fold_convert (type, tem));
4734 case UNLE_EXPR:
4735 case UNLT_EXPR:
4736 if (flag_trapping_math)
4737 break;
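/* Fall through. */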
4738 case LE_EXPR:
4739 case LT_EXPR:
4740 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4741 arg1 = fold_convert (lang_hooks.types.signed_type
4742 (TREE_TYPE (arg1)), arg1);
4743 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4744 return negate_expr (fold_convert (type, tem));
4745 default:
4746 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4747 break;
4750 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4751 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4752 both transformations are correct when A is NaN: A != 0
4753 is then true, and A == 0 is false. */
4755 if (integer_zerop (arg01) && integer_zerop (arg2))
4757 if (comp_code == NE_EXPR)
4758 return pedantic_non_lvalue (fold_convert (type, arg1));
4759 else if (comp_code == EQ_EXPR)
4760 return build_int_cst (type, 0);
4763 /* Try some transformations of A op B ? A : B.
4765 A == B? A : B same as B
4766 A != B? A : B same as A
4767 A >= B? A : B same as max (A, B)
4768 A > B? A : B same as max (B, A)
4769 A <= B? A : B same as min (A, B)
4770 A < B? A : B same as min (B, A)
4772 As above, these transformations don't work in the presence
4773 of signed zeros. For example, if A and B are zeros of
4774 opposite sign, the first two transformations will change
4775 the sign of the result. In the last four, the original
4776 expressions give different results for (A=+0, B=-0) and
4777 (A=-0, B=+0), but the transformed expressions do not.
4779 The first two transformations are correct if either A or B
4780 is a NaN. In the first transformation, the condition will
4781 be false, and B will indeed be chosen. In the case of the
4782 second transformation, the condition A != B will be true,
4783 and A will be chosen.
4785 The conversions to max() and min() are not correct if B is
4786 a number and A is not. The conditions in the original
4787 expressions will be false, so all four give B. The min()
4788 and max() versions would give a NaN instead. */
4789 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4790 /* Avoid these transformations if the COND_EXPR may be used
4791 as an lvalue in the C++ front-end. PR c++/19199. */
4792 && (in_gimple_form
4793 || (strcmp (lang_hooks.name, "GNU C++") != 0
4794 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4795 || ! maybe_lvalue_p (arg1)
4796 || ! maybe_lvalue_p (arg2)))
4798 tree comp_op0 = arg00;
4799 tree comp_op1 = arg01;
4800 tree comp_type = TREE_TYPE (comp_op0);
4802 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4803 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4805 comp_type = type;
4806 comp_op0 = arg1;
4807 comp_op1 = arg2;
4810 switch (comp_code)
4812 case EQ_EXPR:
4813 return pedantic_non_lvalue (fold_convert (type, arg2));
4814 case NE_EXPR:
4815 return pedantic_non_lvalue (fold_convert (type, arg1));
4816 case LE_EXPR:
4817 case LT_EXPR:
4818 case UNLE_EXPR:
4819 case UNLT_EXPR:
4820 /* In C++ a ?: expression can be an lvalue, so put the
4821 operand which will be used if they are equal first
4822 so that we can convert this back to the
4823 corresponding COND_EXPR. */
4824 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4826 comp_op0 = fold_convert (comp_type, comp_op0);
4827 comp_op1 = fold_convert (comp_type, comp_op1);
4828 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4829 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4830 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4831 return pedantic_non_lvalue (fold_convert (type, tem));
4833 break;
4834 case GE_EXPR:
4835 case GT_EXPR:
4836 case UNGE_EXPR:
4837 case UNGT_EXPR:
4838 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4840 comp_op0 = fold_convert (comp_type, comp_op0);
4841 comp_op1 = fold_convert (comp_type, comp_op1);
4842 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4843 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4844 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4845 return pedantic_non_lvalue (fold_convert (type, tem));
4847 break;
4848 case UNEQ_EXPR:
4849 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4850 return pedantic_non_lvalue (fold_convert (type, arg2));
4851 break;
4852 case LTGT_EXPR:
4853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4854 return pedantic_non_lvalue (fold_convert (type, arg1));
4855 break;
4856 default:
4857 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4858 break;
4862 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4863 we might still be able to simplify this. For example,
4864 if C1 is one less or one more than C2, this might have started
4865 out as a MIN or MAX and been transformed by this function.
4866 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4868 if (INTEGRAL_TYPE_P (type)
4869 && TREE_CODE (arg01) == INTEGER_CST
4870 && TREE_CODE (arg2) == INTEGER_CST)
4871 switch (comp_code)
4873 case EQ_EXPR:
4874 /* We can replace A with C1 in this case. */
4875 arg1 = fold_convert (type, arg01);
4876 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4878 case LT_EXPR:
4879 /* If C1 is C2 + 1, this is min(A, C2). */
4880 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4881 OEP_ONLY_CONST)
4882 && operand_equal_p (arg01,
4883 const_binop (PLUS_EXPR, arg2,
4884 build_int_cst (type, 1), 0),
4885 OEP_ONLY_CONST))
4886 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4887 type, arg1, arg2));
4888 break;
4890 case LE_EXPR:
4891 /* If C1 is C2 - 1, this is min(A, C2). */
4892 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4893 OEP_ONLY_CONST)
4894 && operand_equal_p (arg01,
4895 const_binop (MINUS_EXPR, arg2,
4896 build_int_cst (type, 1), 0),
4897 OEP_ONLY_CONST))
4898 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4899 type, arg1, arg2));
4900 break;
4902 case GT_EXPR:
4903 /* If C1 is C2 - 1, this is max(A, C2). */
4904 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4905 OEP_ONLY_CONST)
4906 && operand_equal_p (arg01,
4907 const_binop (MINUS_EXPR, arg2,
4908 build_int_cst (type, 1), 0),
4909 OEP_ONLY_CONST))
4910 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4911 type, arg1, arg2));
4912 break;
4914 case GE_EXPR:
4915 /* If C1 is C2 + 1, this is max(A, C2). */
4916 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4917 OEP_ONLY_CONST)
4918 && operand_equal_p (arg01,
4919 const_binop (PLUS_EXPR, arg2,
4920 build_int_cst (type, 1), 0),
4921 OEP_ONLY_CONST))
4922 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4923 type, arg1, arg2));
4924 break;
4925 case NE_EXPR:
4926 break;
4927 default:
4928 gcc_unreachable ();
4931 return NULL_TREE;
4936 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4937 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4938 #endif
4940 /* EXP is some logical combination of boolean tests. See if we can
4941 merge it into some range test. Return the new tree if so. */
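/* For example, ch >= '0' && ch <= '9' merges into the single range
   + ['0', '9'] and is rewritten as roughly
   (unsigned char) (ch - '0') <= 9 (the exact types depend on the
   target).  */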
4943 static tree
4944 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4946 int or_op = (code == TRUTH_ORIF_EXPR
4947 || code == TRUTH_OR_EXPR);
4948 int in0_p, in1_p, in_p;
4949 tree low0, low1, low, high0, high1, high;
4950 bool strict_overflow_p = false;
4951 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4952 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4953 tree tem;
4954 const char * const warnmsg = G_("assuming signed overflow does not occur "
4955 "when simplifying range test");
4957 /* If this is an OR operation, invert both sides; we will invert
4958 again at the end. */
4959 if (or_op)
4960 in0_p = ! in0_p, in1_p = ! in1_p;
4962 /* If both expressions are the same, if we can merge the ranges, and we
4963 can build the range test, return it or it inverted. If one of the
4964 ranges is always true or always false, consider it to be the same
4965 expression as the other. */
4966 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4967 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4968 in1_p, low1, high1)
4969 && 0 != (tem = (build_range_check (type,
4970 lhs != 0 ? lhs
4971 : rhs != 0 ? rhs : integer_zero_node,
4972 in_p, low, high))))
4974 if (strict_overflow_p)
4975 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4976 return or_op ? invert_truthvalue (tem) : tem;
4979 /* On machines where branches are expensive, if this is a
4980 short-circuited branch and the underlying object on both sides
4981 is the same, make a non-short-circuit operation. */
4982 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4983 && lhs != 0 && rhs != 0
4984 && (code == TRUTH_ANDIF_EXPR
4985 || code == TRUTH_ORIF_EXPR)
4986 && operand_equal_p (lhs, rhs, 0))
4988 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4989 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4990 which cases we can't do this. */
4991 if (simple_operand_p (lhs))
4992 return build2 (code == TRUTH_ANDIF_EXPR
4993 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4994 type, op0, op1);
4996 else if (lang_hooks.decls.global_bindings_p () == 0
4997 && ! CONTAINS_PLACEHOLDER_P (lhs))
4999 tree common = save_expr (lhs);
5001 if (0 != (lhs = build_range_check (type, common,
5002 or_op ? ! in0_p : in0_p,
5003 low0, high0))
5004 && (0 != (rhs = build_range_check (type, common,
5005 or_op ? ! in1_p : in1_p,
5006 low1, high1))))
5008 if (strict_overflow_p)
5009 fold_overflow_warning (warnmsg,
5010 WARN_STRICT_OVERFLOW_COMPARISON);
5011 return build2 (code == TRUTH_ANDIF_EXPR
5012 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5013 type, lhs, rhs);
5018 return 0;
5021 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5022 bit value. Arrange things so the extra bits will be set to zero if and
5023 only if C is sign-extended to its full width. If MASK is nonzero,
5024 it is an INTEGER_CST that should be AND'ed with the extra bits. */
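/* For example, assuming an 8-bit mode with P == 4 and UNSIGNEDP == 0:
   C == 0x0d (-3 as a 4-bit signed value) yields 0xfd, i.e. the 4-bit
   value sign-extended to the full width, while C == 0x05 comes back
   unchanged, since the XOR below is with zero.  */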
5026 static tree
5027 unextend (tree c, int p, int unsignedp, tree mask)
5029 tree type = TREE_TYPE (c);
5030 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5031 tree temp;
5033 if (p == modesize || unsignedp)
5034 return c;
5036 /* We work by getting just the sign bit into the low-order bit, then
5037 into the high-order bit, then sign-extend. We then XOR that value
5038 with C. */
5039 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5040 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5042 /* We must use a signed type in order to get an arithmetic right shift.
5043 However, we must also avoid introducing accidental overflows, so that
5044 a subsequent call to integer_zerop will work. Hence we must
5045 do the type conversion here. At this point, the constant is either
5046 zero or one, and the conversion to a signed type can never overflow.
5047 We could get an overflow if this conversion is done anywhere else. */
5048 if (TYPE_UNSIGNED (type))
5049 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
5051 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5052 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5053 if (mask != 0)
5054 temp = const_binop (BIT_AND_EXPR, temp,
5055 fold_convert (TREE_TYPE (c), mask), 0);
5056 /* If necessary, convert the type back to match the type of C. */
5057 if (TYPE_UNSIGNED (type))
5058 temp = fold_convert (type, temp);
5060 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5063 /* Find ways of folding logical expressions of LHS and RHS:
5064 Try to merge two comparisons to the same innermost item.
5065 Look for range tests like "ch >= '0' && ch <= '9'".
5066 Look for combinations of simple terms on machines with expensive branches
5067 and evaluate the RHS unconditionally.
5069 For example, if we have p->a == 2 && p->b == 4 and we can make an
5070 object large enough to span both A and B, we can do this with a comparison
5071 against the object ANDed with a mask.
5073 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5074 operations to do this with one comparison.
5076 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5077 function and the one above.
5079 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5080 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5082 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5083 two operands.
5085 We return the simplified tree or 0 if no optimization is possible. */
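/* For example, with adjacent bit-fields (a sketch; the merged constant
   depends on the target's byte and bit order):

       struct s { unsigned int f : 4; unsigned int g : 4; } *p;
       p->f == 2 && p->g == 3

   can become a single byte-sized load compared against a merged
   constant, roughly *(unsigned char *) p == 0x32.  */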
5087 static tree
5088 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5090 /* If this is the "or" of two comparisons, we can do something if
5091 the comparisons are NE_EXPR. If this is the "and", we can do something
5092 if the comparisons are EQ_EXPR. I.e.,
5093 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5095 WANTED_CODE is that comparison code. For single bit fields, we can
5096 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5097 comparison for one-bit fields. */
5099 enum tree_code wanted_code;
5100 enum tree_code lcode, rcode;
5101 tree ll_arg, lr_arg, rl_arg, rr_arg;
5102 tree ll_inner, lr_inner, rl_inner, rr_inner;
5103 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5104 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5105 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5106 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5107 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5108 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5109 enum machine_mode lnmode, rnmode;
5110 tree ll_mask, lr_mask, rl_mask, rr_mask;
5111 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5112 tree l_const, r_const;
5113 tree lntype, rntype, result;
5114 int first_bit, end_bit;
5115 int volatilep;
5116 tree orig_lhs = lhs, orig_rhs = rhs;
5117 enum tree_code orig_code = code;
5119 /* Start by getting the comparison codes. Fail if anything is volatile.
5120 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5121 it were surrounded with a NE_EXPR. */
5123 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5124 return 0;
5126 lcode = TREE_CODE (lhs);
5127 rcode = TREE_CODE (rhs);
5129 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5131 lhs = build2 (NE_EXPR, truth_type, lhs,
5132 build_int_cst (TREE_TYPE (lhs), 0));
5133 lcode = NE_EXPR;
5136 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5138 rhs = build2 (NE_EXPR, truth_type, rhs,
5139 build_int_cst (TREE_TYPE (rhs), 0));
5140 rcode = NE_EXPR;
5143 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5144 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5145 return 0;
5147 ll_arg = TREE_OPERAND (lhs, 0);
5148 lr_arg = TREE_OPERAND (lhs, 1);
5149 rl_arg = TREE_OPERAND (rhs, 0);
5150 rr_arg = TREE_OPERAND (rhs, 1);
5152 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5153 if (simple_operand_p (ll_arg)
5154 && simple_operand_p (lr_arg))
5156 tree result;
5157 if (operand_equal_p (ll_arg, rl_arg, 0)
5158 && operand_equal_p (lr_arg, rr_arg, 0))
5160 result = combine_comparisons (code, lcode, rcode,
5161 truth_type, ll_arg, lr_arg);
5162 if (result)
5163 return result;
5165 else if (operand_equal_p (ll_arg, rr_arg, 0)
5166 && operand_equal_p (lr_arg, rl_arg, 0))
5168 result = combine_comparisons (code, lcode,
5169 swap_tree_comparison (rcode),
5170 truth_type, ll_arg, lr_arg);
5171 if (result)
5172 return result;
5176 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5177 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5179 /* If the RHS can be evaluated unconditionally and its operands are
5180 simple, it wins to evaluate the RHS unconditionally on machines
5181 with expensive branches. In this case, this isn't a comparison
5182 that can be merged. Avoid doing this if the RHS is a floating-point
5183 comparison since those can trap. */
5185 if (BRANCH_COST >= 2
5186 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5187 && simple_operand_p (rl_arg)
5188 && simple_operand_p (rr_arg))
5190 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5191 if (code == TRUTH_OR_EXPR
5192 && lcode == NE_EXPR && integer_zerop (lr_arg)
5193 && rcode == NE_EXPR && integer_zerop (rr_arg)
5194 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5195 return build2 (NE_EXPR, truth_type,
5196 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5197 ll_arg, rl_arg),
5198 build_int_cst (TREE_TYPE (ll_arg), 0));
5200 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5201 if (code == TRUTH_AND_EXPR
5202 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5203 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5204 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5205 return build2 (EQ_EXPR, truth_type,
5206 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5207 ll_arg, rl_arg),
5208 build_int_cst (TREE_TYPE (ll_arg), 0));
5210 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5212 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5213 return build2 (code, truth_type, lhs, rhs);
5214 return NULL_TREE;
5218 /* See if the comparisons can be merged. Then get all the parameters for
5219 each side. */
5221 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5222 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5223 return 0;
5225 volatilep = 0;
5226 ll_inner = decode_field_reference (ll_arg,
5227 &ll_bitsize, &ll_bitpos, &ll_mode,
5228 &ll_unsignedp, &volatilep, &ll_mask,
5229 &ll_and_mask);
5230 lr_inner = decode_field_reference (lr_arg,
5231 &lr_bitsize, &lr_bitpos, &lr_mode,
5232 &lr_unsignedp, &volatilep, &lr_mask,
5233 &lr_and_mask);
5234 rl_inner = decode_field_reference (rl_arg,
5235 &rl_bitsize, &rl_bitpos, &rl_mode,
5236 &rl_unsignedp, &volatilep, &rl_mask,
5237 &rl_and_mask);
5238 rr_inner = decode_field_reference (rr_arg,
5239 &rr_bitsize, &rr_bitpos, &rr_mode,
5240 &rr_unsignedp, &volatilep, &rr_mask,
5241 &rr_and_mask);
5243 /* The inner operation on the lhs of each comparison must be the
5244 same if we are to be able to do anything.
5245 Then see if we have constants. If not, the same must be true for
5246 the rhs's. */
5247 if (volatilep || ll_inner == 0 || rl_inner == 0
5248 || ! operand_equal_p (ll_inner, rl_inner, 0))
5249 return 0;
5251 if (TREE_CODE (lr_arg) == INTEGER_CST
5252 && TREE_CODE (rr_arg) == INTEGER_CST)
5253 l_const = lr_arg, r_const = rr_arg;
5254 else if (lr_inner == 0 || rr_inner == 0
5255 || ! operand_equal_p (lr_inner, rr_inner, 0))
5256 return 0;
5257 else
5258 l_const = r_const = 0;
5260 /* If either comparison code is not correct for our logical operation,
5261 fail. However, we can convert a one-bit comparison against zero into
5262 the opposite comparison against that bit being set in the field. */
5264 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5265 if (lcode != wanted_code)
5267 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5269 /* Make the left operand unsigned, since we are only interested
5270 in the value of one bit. Otherwise we are doing the wrong
5271 thing below. */
5272 ll_unsignedp = 1;
5273 l_const = ll_mask;
5275 else
5276 return 0;
5279 /* This is analogous to the code for l_const above. */
5280 if (rcode != wanted_code)
5282 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5284 rl_unsignedp = 1;
5285 r_const = rl_mask;
5287 else
5288 return 0;
5291 /* See if we can find a mode that contains both fields being compared on
5292 the left. If we can't, fail. Otherwise, update all constants and masks
5293 to be relative to a field of that size. */
5294 first_bit = MIN (ll_bitpos, rl_bitpos);
5295 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5296 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5297 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5298 volatilep);
5299 if (lnmode == VOIDmode)
5300 return 0;
5302 lnbitsize = GET_MODE_BITSIZE (lnmode);
5303 lnbitpos = first_bit & ~ (lnbitsize - 1);
5304 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5305 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5307 if (BYTES_BIG_ENDIAN)
5309 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5310 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5313 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5314 size_int (xll_bitpos), 0);
5315 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5316 size_int (xrl_bitpos), 0);
5318 if (l_const)
5320 l_const = fold_convert (lntype, l_const);
5321 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5322 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5323 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5324 fold_build1 (BIT_NOT_EXPR,
5325 lntype, ll_mask),
5326 0)))
5328 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5330 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5333 if (r_const)
5335 r_const = fold_convert (lntype, r_const);
5336 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5337 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5338 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5339 fold_build1 (BIT_NOT_EXPR,
5340 lntype, rl_mask),
5341 0)))
5343 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5345 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5349 /* If the right sides are not constant, do the same for them. Also,
5350 disallow this optimization if a size or signedness mismatch occurs
5351 between the left and right sides. */
5352 if (l_const == 0)
5354 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5355 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5356 /* Make sure the two fields on the right
5357 correspond to the left without being swapped. */
5358 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5359 return 0;
5361 first_bit = MIN (lr_bitpos, rr_bitpos);
5362 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5363 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5364 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5365 volatilep);
5366 if (rnmode == VOIDmode)
5367 return 0;
5369 rnbitsize = GET_MODE_BITSIZE (rnmode);
5370 rnbitpos = first_bit & ~ (rnbitsize - 1);
5371 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5372 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5374 if (BYTES_BIG_ENDIAN)
5376 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5377 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5380 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5381 size_int (xlr_bitpos), 0);
5382 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5383 size_int (xrr_bitpos), 0);
5385 /* Make a mask that corresponds to both fields being compared.
5386 Do this for both items being compared. If the operands are the
5387 same size and the bits being compared are in the same position
5388 then we can do this by masking both and comparing the masked
5389 results. */
5390 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5391 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5392 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5394 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5395 ll_unsignedp || rl_unsignedp);
5396 if (! all_ones_mask_p (ll_mask, lnbitsize))
5397 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5399 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5400 lr_unsignedp || rr_unsignedp);
5401 if (! all_ones_mask_p (lr_mask, rnbitsize))
5402 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5404 return build2 (wanted_code, truth_type, lhs, rhs);
5407 /* There is still another way we can do something: If both pairs of
5408 fields being compared are adjacent, we may be able to make a wider
5409 field containing them both.
5411 Note that we still must mask the lhs/rhs expressions. Furthermore,
5412 the mask must be shifted to account for the shift done by
5413 make_bit_field_ref. */
5414 if ((ll_bitsize + ll_bitpos == rl_bitpos
5415 && lr_bitsize + lr_bitpos == rr_bitpos)
5416 || (ll_bitpos == rl_bitpos + rl_bitsize
5417 && lr_bitpos == rr_bitpos + rr_bitsize))
5419 tree type;
5421 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5422 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5423 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5424 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5426 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5427 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5428 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5429 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5431 /* Convert to the smaller type before masking out unwanted bits. */
5432 type = lntype;
5433 if (lntype != rntype)
5435 if (lnbitsize > rnbitsize)
5437 lhs = fold_convert (rntype, lhs);
5438 ll_mask = fold_convert (rntype, ll_mask);
5439 type = rntype;
5441 else if (lnbitsize < rnbitsize)
5443 rhs = fold_convert (lntype, rhs);
5444 lr_mask = fold_convert (lntype, lr_mask);
5445 type = lntype;
5449 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5450 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5452 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5453 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5455 return build2 (wanted_code, truth_type, lhs, rhs);
5458 return 0;
5461 /* Handle the case of comparisons with constants. If there is something in
5462 common between the masks, those bits of the constants must be the same.
5463 If not, the condition is always false. Test for this to avoid generating
5464 incorrect code below. */
5465 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5466 if (! integer_zerop (result)
5467 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5468 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5470 if (wanted_code == NE_EXPR)
5472 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5473 return constant_boolean_node (true, truth_type);
5475 else
5477 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5478 return constant_boolean_node (false, truth_type);
5482 /* Construct the expression we will return. First get the component
5483 reference we will make. Unless the mask is all ones the width of
5484 that field, perform the mask operation. Then compare with the
5485 merged constant. */
5486 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5487 ll_unsignedp || rl_unsignedp);
5489 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5490 if (! all_ones_mask_p (ll_mask, lnbitsize))
5491 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5493 return build2 (wanted_code, truth_type, result,
5494 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5497 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5498 constant. */
5500 static tree
5501 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5503 tree arg0 = op0;
5504 enum tree_code op_code;
5505 tree comp_const = op1;
5506 tree minmax_const;
5507 int consts_equal, consts_lt;
5508 tree inner;
5510 STRIP_SIGN_NOPS (arg0);
5512 op_code = TREE_CODE (arg0);
5513 minmax_const = TREE_OPERAND (arg0, 1);
5514 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5515 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5516 inner = TREE_OPERAND (arg0, 0);
5518 /* If something does not permit us to optimize, return NULL_TREE. */
5519 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5520 || TREE_CODE (comp_const) != INTEGER_CST
5521 || TREE_OVERFLOW (comp_const)
5522 || TREE_CODE (minmax_const) != INTEGER_CST
5523 || TREE_OVERFLOW (minmax_const))
5524 return NULL_TREE;
5526 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5527 and GT_EXPR, doing the rest with recursive calls using logical
5528 simplifications. */
5529 switch (code)
5531 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5533 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5534 type, op0, op1);
5535 if (tem)
5536 return invert_truthvalue (tem);
5537 return NULL_TREE;
5540 case GE_EXPR:
5541 return
5542 fold_build2 (TRUTH_ORIF_EXPR, type,
5543 optimize_minmax_comparison
5544 (EQ_EXPR, type, arg0, comp_const),
5545 optimize_minmax_comparison
5546 (GT_EXPR, type, arg0, comp_const));
5548 case EQ_EXPR:
5549 if (op_code == MAX_EXPR && consts_equal)
5550 /* MAX (X, 0) == 0 -> X <= 0 */
5551 return fold_build2 (LE_EXPR, type, inner, comp_const);
5553 else if (op_code == MAX_EXPR && consts_lt)
5554 /* MAX (X, 0) == 5 -> X == 5 */
5555 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5557 else if (op_code == MAX_EXPR)
5558 /* MAX (X, 0) == -1 -> false */
5559 return omit_one_operand (type, integer_zero_node, inner);
5561 else if (consts_equal)
5562 /* MIN (X, 0) == 0 -> X >= 0 */
5563 return fold_build2 (GE_EXPR, type, inner, comp_const);
5565 else if (consts_lt)
5566 /* MIN (X, 0) == 5 -> false */
5567 return omit_one_operand (type, integer_zero_node, inner);
5569 else
5570 /* MIN (X, 0) == -1 -> X == -1 */
5571 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5573 case GT_EXPR:
5574 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5575 /* MAX (X, 0) > 0 -> X > 0
5576 MAX (X, 0) > 5 -> X > 5 */
5577 return fold_build2 (GT_EXPR, type, inner, comp_const);
5579 else if (op_code == MAX_EXPR)
5580 /* MAX (X, 0) > -1 -> true */
5581 return omit_one_operand (type, integer_one_node, inner);
5583 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5584 /* MIN (X, 0) > 0 -> false
5585 MIN (X, 0) > 5 -> false */
5586 return omit_one_operand (type, integer_zero_node, inner);
5588 else
5589 /* MIN (X, 0) > -1 -> X > -1 */
5590 return fold_build2 (GT_EXPR, type, inner, comp_const);
5592 default:
5593 return NULL_TREE;
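/* Worked example (editorial note, not in the original source):
   MIN (X, 4) != 4 is handled by inverting to MIN (X, 4) == 4, which
   the EQ_EXPR/consts_equal case folds to X >= 4; invert_truthvalue
   then yields X < 4.  */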
5597 /* T is an integer expression that is being multiplied, divided, or taken a
5598 modulus (CODE says which and what kind of divide or modulus) by a
5599 constant C. See if we can eliminate that operation by folding it with
5600 other operations already in T. WIDE_TYPE, if non-null, is a type that
5601 should be used for the computation if wider than our type.
5603 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5604 (X * 2) + (Y * 4). We must, however, be assured that either the original
5605 expression would not overflow or that overflow is undefined for the type
5606 in the language in question.
5608 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5609 the machine has a multiply-accumulate insn or that this is part of an
5610 addressing calculation.
5612 If we return a non-null expression, it is an equivalent form of the
5613 original computation, but need not be in the original type.
5615    We set *STRICT_OVERFLOW_P to true if the return value depends on
5616 signed overflow being undefined. Otherwise we do not change
5617 *STRICT_OVERFLOW_P. */
5619 static tree
5620 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5621 bool *strict_overflow_p)
5623 /* To avoid exponential search depth, refuse to allow recursion past
5624 three levels. Beyond that (1) it's highly unlikely that we'll find
5625 something interesting and (2) we've probably processed it before
5626 when we built the inner expression. */
5628 static int depth;
5629 tree ret;
5631 if (depth > 3)
5632 return NULL;
5634 depth++;
5635 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5636 depth--;
5638 return ret;
5641 static tree
5642 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5643 bool *strict_overflow_p)
5645 tree type = TREE_TYPE (t);
5646 enum tree_code tcode = TREE_CODE (t);
5647 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5648 > GET_MODE_SIZE (TYPE_MODE (type)))
5649 ? wide_type : type);
5650 tree t1, t2;
5651 int same_p = tcode == code;
5652 tree op0 = NULL_TREE, op1 = NULL_TREE;
5653 bool sub_strict_overflow_p;
5655 /* Don't deal with constants of zero here; they confuse the code below. */
5656 if (integer_zerop (c))
5657 return NULL_TREE;
5659 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5660 op0 = TREE_OPERAND (t, 0);
5662 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5663 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5665 /* Note that we need not handle conditional operations here since fold
5666 already handles those cases. So just do arithmetic here. */
5667 switch (tcode)
5669 case INTEGER_CST:
5670 /* For a constant, we can always simplify if we are a multiply
5671 or (for divide and modulus) if it is a multiple of our constant. */
5672 if (code == MULT_EXPR
5673 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5674 return const_binop (code, fold_convert (ctype, t),
5675 fold_convert (ctype, c), 0);
5676 break;
5678 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5679 /* If op0 is an expression ... */
5680 if ((COMPARISON_CLASS_P (op0)
5681 || UNARY_CLASS_P (op0)
5682 || BINARY_CLASS_P (op0)
5683 || VL_EXP_CLASS_P (op0)
5684 || EXPRESSION_CLASS_P (op0))
5685 /* ... and is unsigned, and its type is smaller than ctype,
5686 then we cannot pass through as widening. */
5687 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5688 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5689 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5690 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5691 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5692 /* ... or this is a truncation (t is narrower than op0),
5693 then we cannot pass through this narrowing. */
5694 || (GET_MODE_SIZE (TYPE_MODE (type))
5695 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5696 /* ... or signedness changes for division or modulus,
5697 then we cannot pass through this conversion. */
5698 || (code != MULT_EXPR
5699 && (TYPE_UNSIGNED (ctype)
5700 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5701 break;
5703 /* Pass the constant down and see if we can make a simplification. If
5704 we can, replace this expression with the inner simplification for
5705 possible later conversion to our or some other type. */
5706 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5707 && TREE_CODE (t2) == INTEGER_CST
5708 && !TREE_OVERFLOW (t2)
5709 && (0 != (t1 = extract_muldiv (op0, t2, code,
5710 code == MULT_EXPR
5711 ? ctype : NULL_TREE,
5712 strict_overflow_p))))
5713 return t1;
5714 break;
5716 case ABS_EXPR:
5717 /* If widening the type changes it from signed to unsigned, then we
5718 must avoid building ABS_EXPR itself as unsigned. */
5719 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5721 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5722 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5723 != 0)
5725 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5726 return fold_convert (ctype, t1);
5728 break;
5730 /* FALLTHROUGH */
5731 case NEGATE_EXPR:
5732 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5733 != 0)
5734 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5735 break;
5737 case MIN_EXPR: case MAX_EXPR:
5738 /* If widening the type changes the signedness, then we can't perform
5739 this optimization as that changes the result. */
5740 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5741 break;
5743 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5744 sub_strict_overflow_p = false;
5745 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5746 &sub_strict_overflow_p)) != 0
5747 && (t2 = extract_muldiv (op1, c, code, wide_type,
5748 &sub_strict_overflow_p)) != 0)
5750 if (tree_int_cst_sgn (c) < 0)
5751 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5752 if (sub_strict_overflow_p)
5753 *strict_overflow_p = true;
5754 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5755 fold_convert (ctype, t2));
5757 break;
5759 case LSHIFT_EXPR: case RSHIFT_EXPR:
5760 /* If the second operand is constant, this is a multiplication
5761 	 or floor division by a power of two, so we can treat it that
5762 way unless the multiplier or divisor overflows. Signed
5763 left-shift overflow is implementation-defined rather than
5764 undefined in C90, so do not convert signed left shift into
5765 multiplication. */
5766 if (TREE_CODE (op1) == INTEGER_CST
5767 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5768 /* const_binop may not detect overflow correctly,
5769 so check for it explicitly here. */
5770 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5771 && TREE_INT_CST_HIGH (op1) == 0
5772 && 0 != (t1 = fold_convert (ctype,
5773 const_binop (LSHIFT_EXPR,
5774 size_one_node,
5775 op1, 0)))
5776 && !TREE_OVERFLOW (t1))
5777 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5778 ? MULT_EXPR : FLOOR_DIV_EXPR,
5779 ctype, fold_convert (ctype, op0), t1),
5780 c, code, wide_type, strict_overflow_p);
5781 break;
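      /* Example (editorial note, not in the original source): T = X >> 2
	 is rewritten above as FLOOR_DIV_EXPR (X, 4), and T = X << 3 with
	 unsigned X as MULT_EXPR (X, 8), before recursing on the new form.  */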
5783 case PLUS_EXPR: case MINUS_EXPR:
5784 /* See if we can eliminate the operation on both sides. If we can, we
5785 can return a new PLUS or MINUS. If we can't, the only remaining
5786 cases where we can do anything are if the second operand is a
5787 constant. */
5788 sub_strict_overflow_p = false;
5789 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5790 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5791 if (t1 != 0 && t2 != 0
5792 && (code == MULT_EXPR
5793 /* If not multiplication, we can only do this if both operands
5794 are divisible by c. */
5795 || (multiple_of_p (ctype, op0, c)
5796 && multiple_of_p (ctype, op1, c))))
5798 if (sub_strict_overflow_p)
5799 *strict_overflow_p = true;
5800 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5801 fold_convert (ctype, t2));
5804 /* If this was a subtraction, negate OP1 and set it to be an addition.
5805 This simplifies the logic below. */
5806 if (tcode == MINUS_EXPR)
5807 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5809 if (TREE_CODE (op1) != INTEGER_CST)
5810 break;
5812       /* If either OP1 or C is negative, this optimization is not safe for
5813 	 some of the division and remainder types, while for others we need
5814 to change the code. */
5815 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5817 if (code == CEIL_DIV_EXPR)
5818 code = FLOOR_DIV_EXPR;
5819 else if (code == FLOOR_DIV_EXPR)
5820 code = CEIL_DIV_EXPR;
5821 else if (code != MULT_EXPR
5822 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5823 break;
5826 /* If it's a multiply or a division/modulus operation of a multiple
5827 of our constant, do the operation and verify it doesn't overflow. */
5828 if (code == MULT_EXPR
5829 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5831 op1 = const_binop (code, fold_convert (ctype, op1),
5832 fold_convert (ctype, c), 0);
5833 /* We allow the constant to overflow with wrapping semantics. */
5834 if (op1 == 0
5835 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5836 break;
5838 else
5839 break;
5841       /* If we have an unsigned type that is not a sizetype, we cannot widen
5842 the operation since it will change the result if the original
5843 computation overflowed. */
5844 if (TYPE_UNSIGNED (ctype)
5845 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5846 && ctype != type)
5847 break;
5849 /* If we were able to eliminate our operation from the first side,
5850 apply our operation to the second side and reform the PLUS. */
5851 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5852 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5854 /* The last case is if we are a multiply. In that case, we can
5855 apply the distributive law to commute the multiply and addition
5856 if the multiplication of the constants doesn't overflow. */
5857 if (code == MULT_EXPR)
5858 return fold_build2 (tcode, ctype,
5859 fold_build2 (code, ctype,
5860 fold_convert (ctype, op0),
5861 fold_convert (ctype, c)),
5862 op1);
5864 break;
5866 case MULT_EXPR:
5867 /* We have a special case here if we are doing something like
5868 (C * 8) % 4 since we know that's zero. */
5869 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5870 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5871 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5872 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5873 return omit_one_operand (type, integer_zero_node, op0);
5875 /* ... fall through ... */
5877 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5878 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5879 /* If we can extract our operation from the LHS, do so and return a
5880 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5881 do something only if the second operand is a constant. */
5882 if (same_p
5883 && (t1 = extract_muldiv (op0, c, code, wide_type,
5884 strict_overflow_p)) != 0)
5885 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5886 fold_convert (ctype, op1));
5887 else if (tcode == MULT_EXPR && code == MULT_EXPR
5888 && (t1 = extract_muldiv (op1, c, code, wide_type,
5889 strict_overflow_p)) != 0)
5890 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5891 fold_convert (ctype, t1));
5892 else if (TREE_CODE (op1) != INTEGER_CST)
5893 return 0;
5895 /* If these are the same operation types, we can associate them
5896 assuming no overflow. */
5897 if (tcode == code
5898 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5899 fold_convert (ctype, c), 0))
5900 && !TREE_OVERFLOW (t1))
5901 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5903 /* If these operations "cancel" each other, we have the main
5904 optimizations of this pass, which occur when either constant is a
5905 multiple of the other, in which case we replace this with either an
5906 	 operation of CODE or TCODE.
5908 If we have an unsigned type that is not a sizetype, we cannot do
5909 this since it will change the result if the original computation
5910 overflowed. */
5911 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5912 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5913 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5914 || (tcode == MULT_EXPR
5915 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5916 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5918 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5920 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5921 *strict_overflow_p = true;
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5923 fold_convert (ctype,
5924 const_binop (TRUNC_DIV_EXPR,
5925 op1, c, 0)));
5927 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5929 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5930 *strict_overflow_p = true;
5931 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5932 fold_convert (ctype,
5933 const_binop (TRUNC_DIV_EXPR,
5934 c, op1, 0)));
5937 break;
5939 default:
5940 break;
5943 return 0;
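/* Worked example (editorial note, not in the original source):
   extract_muldiv on T = X * 28 with C = 4 and CODE = TRUNC_DIV_EXPR
   hits the MULT_EXPR "cancel" case above: 28 % 4 == 0, so the result
   is X * 7, with *STRICT_OVERFLOW_P set when signed overflow is
   undefined in the type.  */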
5946 /* Return a node which has the indicated constant VALUE (either 0 or
5947 1), and is of the indicated TYPE. */
5949 tree
5950 constant_boolean_node (int value, tree type)
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5956 else
5957 return build_int_cst (type, value);
5961 /* Return true if expr looks like an ARRAY_REF and set base and
5962 offset to the appropriate trees. If there is no offset,
5963 offset is set to NULL_TREE. Base will be canonicalized to
5964 something you can get the element type from using
5965 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5966    in bytes from the base.  */
5968 static bool
5969 extract_array_ref (tree expr, tree *base, tree *offset)
5971 /* One canonical form is a PLUS_EXPR with the first
5972 argument being an ADDR_EXPR with a possible NOP_EXPR
5973 attached. */
5974 if (TREE_CODE (expr) == PLUS_EXPR)
5976 tree op0 = TREE_OPERAND (expr, 0);
5977 tree inner_base, dummy1;
5978 /* Strip NOP_EXPRs here because the C frontends and/or
5979 	 folders may present us with (int *)&x.a + 4B.  */
5980 STRIP_NOPS (op0);
5981 if (extract_array_ref (op0, &inner_base, &dummy1))
5983 *base = inner_base;
5984 if (dummy1 == NULL_TREE)
5985 *offset = TREE_OPERAND (expr, 1);
5986 else
5987 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5988 dummy1, TREE_OPERAND (expr, 1));
5989 return true;
5992   /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5993 which we transform into an ADDR_EXPR with appropriate
5994 offset. For other arguments to the ADDR_EXPR we assume
5995 zero offset and as such do not care about the ADDR_EXPR
5996 type and strip possible nops from it. */
5997 else if (TREE_CODE (expr) == ADDR_EXPR)
5999 tree op0 = TREE_OPERAND (expr, 0);
6000 if (TREE_CODE (op0) == ARRAY_REF)
6002 tree idx = TREE_OPERAND (op0, 1);
6003 *base = TREE_OPERAND (op0, 0);
6004 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6005 array_ref_element_size (op0));
6007 else
6009 /* Handle array-to-pointer decay as &a. */
6010 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6011 *base = TREE_OPERAND (expr, 0);
6012 else
6013 *base = expr;
6014 *offset = NULL_TREE;
6016 return true;
6018 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6019 else if (SSA_VAR_P (expr)
6020 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6022 *base = expr;
6023 *offset = NULL_TREE;
6024 return true;
6027 return false;
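/* Illustrative decompositions (editorial note, not in the original
   source): &a[i] yields base a and offset i * sizeof (element);
   p + 4 for a pointer p yields base p and offset 4; a plain pointer
   variable p yields base p and a NULL_TREE offset.  */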
6031 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6032    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6033 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6034 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6035 COND is the first argument to CODE; otherwise (as in the example
6036 given here), it is the second argument. TYPE is the type of the
6037 original expression. Return NULL_TREE if no simplification is
6038 possible. */
6040 static tree
6041 fold_binary_op_with_conditional_arg (enum tree_code code,
6042 tree type, tree op0, tree op1,
6043 tree cond, tree arg, int cond_first_p)
6045 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6046 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6047 tree test, true_value, false_value;
6048 tree lhs = NULL_TREE;
6049 tree rhs = NULL_TREE;
6051 /* This transformation is only worthwhile if we don't have to wrap
6052 arg in a SAVE_EXPR, and the operation can be simplified on at least
6053      one of the branches once it's pushed inside the COND_EXPR.  */
6054 if (!TREE_CONSTANT (arg))
6055 return NULL_TREE;
6057 if (TREE_CODE (cond) == COND_EXPR)
6059 test = TREE_OPERAND (cond, 0);
6060 true_value = TREE_OPERAND (cond, 1);
6061 false_value = TREE_OPERAND (cond, 2);
6062       /* If this operand throws an exception, then it does not make
6063 sense to try to perform a logical or arithmetic operation
6064 involving it. */
6065 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6066 lhs = true_value;
6067 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6068 rhs = false_value;
6070 else
6072 tree testtype = TREE_TYPE (cond);
6073 test = cond;
6074 true_value = constant_boolean_node (true, testtype);
6075 false_value = constant_boolean_node (false, testtype);
6078 arg = fold_convert (arg_type, arg);
6079 if (lhs == 0)
6081 true_value = fold_convert (cond_type, true_value);
6082 if (cond_first_p)
6083 lhs = fold_build2 (code, type, true_value, arg);
6084 else
6085 lhs = fold_build2 (code, type, arg, true_value);
6087 if (rhs == 0)
6089 false_value = fold_convert (cond_type, false_value);
6090 if (cond_first_p)
6091 rhs = fold_build2 (code, type, false_value, arg);
6092 else
6093 rhs = fold_build2 (code, type, arg, false_value);
6096 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6097 return fold_convert (type, test);
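/* Worked example (editorial note, not in the original source):
   folding 4 + (x < y) builds (x < y) ? (4 + 1) : (4 + 0), i.e.
   (x < y) ? 5 : 4, since the comparison is treated as a COND_EXPR
   with the constant true and false values built above.  */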
6101 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6103 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6104 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6105 ADDEND is the same as X.
6107 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6108 and finite. The problematic cases are when X is zero, and its mode
6109 has signed zeros. In the case of rounding towards -infinity,
6110 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6111 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6113 static bool
6114 fold_real_zero_addition_p (tree type, tree addend, int negate)
6116 if (!real_zerop (addend))
6117 return false;
6119 /* Don't allow the fold with -fsignaling-nans. */
6120 if (HONOR_SNANS (TYPE_MODE (type)))
6121 return false;
6123 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6124 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6125 return true;
6127 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6128 if (TREE_CODE (addend) == REAL_CST
6129 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6130 negate = !negate;
6132 /* The mode has signed zeros, and we have to honor their sign.
6133 In this situation, there is only one case we can return true for.
6134 X - 0 is the same as X unless rounding towards -infinity is
6135 supported. */
6136 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
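/* Example (editorial note, not in the original source): with signed
   zeros honored, X + (-0.0) is flipped above into the X - 0.0 case
   and still folds to X unless sign-dependent rounding is in effect,
   whereas X + 0.0 does not fold because (-0.0) + 0.0 is +0.0.  */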
6139 /* Subroutine of fold() that checks comparisons of built-in math
6140 functions against real constants.
6142 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6143 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6144 is the type of the result and ARG0 and ARG1 are the operands of the
6145 comparison. ARG1 must be a TREE_REAL_CST.
6147 The function returns the constant folded tree if a simplification
6148 can be made, and NULL_TREE otherwise. */
6150 static tree
6151 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6152 tree type, tree arg0, tree arg1)
6154 REAL_VALUE_TYPE c;
6156 if (BUILTIN_SQRT_P (fcode))
6158 tree arg = CALL_EXPR_ARG (arg0, 0);
6159 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6161 c = TREE_REAL_CST (arg1);
6162 if (REAL_VALUE_NEGATIVE (c))
6164 	  /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false if y is negative.  */
6165 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6166 return omit_one_operand (type, integer_zero_node, arg);
6168 /* sqrt(x) > y is always true, if y is negative and we
6169 don't care about NaNs, i.e. negative values of x. */
6170 if (code == NE_EXPR || !HONOR_NANS (mode))
6171 return omit_one_operand (type, integer_one_node, arg);
6173 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6174 return fold_build2 (GE_EXPR, type, arg,
6175 build_real (TREE_TYPE (arg), dconst0));
6177 else if (code == GT_EXPR || code == GE_EXPR)
6179 REAL_VALUE_TYPE c2;
6181 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6182 real_convert (&c2, mode, &c2);
6184 if (REAL_VALUE_ISINF (c2))
6186 /* sqrt(x) > y is x == +Inf, when y is very large. */
6187 if (HONOR_INFINITIES (mode))
6188 return fold_build2 (EQ_EXPR, type, arg,
6189 build_real (TREE_TYPE (arg), c2));
6191 /* sqrt(x) > y is always false, when y is very large
6192 and we don't care about infinities. */
6193 return omit_one_operand (type, integer_zero_node, arg);
6196 /* sqrt(x) > c is the same as x > c*c. */
6197 return fold_build2 (code, type, arg,
6198 build_real (TREE_TYPE (arg), c2));
6200 else if (code == LT_EXPR || code == LE_EXPR)
6202 REAL_VALUE_TYPE c2;
6204 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6205 real_convert (&c2, mode, &c2);
6207 if (REAL_VALUE_ISINF (c2))
6209 /* sqrt(x) < y is always true, when y is a very large
6210 value and we don't care about NaNs or Infinities. */
6211 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6212 return omit_one_operand (type, integer_one_node, arg);
6214 /* sqrt(x) < y is x != +Inf when y is very large and we
6215 don't care about NaNs. */
6216 if (! HONOR_NANS (mode))
6217 return fold_build2 (NE_EXPR, type, arg,
6218 build_real (TREE_TYPE (arg), c2));
6220 /* sqrt(x) < y is x >= 0 when y is very large and we
6221 don't care about Infinities. */
6222 if (! HONOR_INFINITIES (mode))
6223 return fold_build2 (GE_EXPR, type, arg,
6224 build_real (TREE_TYPE (arg), dconst0));
6226 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6227 if (lang_hooks.decls.global_bindings_p () != 0
6228 || CONTAINS_PLACEHOLDER_P (arg))
6229 return NULL_TREE;
6231 arg = save_expr (arg);
6232 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6233 fold_build2 (GE_EXPR, type, arg,
6234 build_real (TREE_TYPE (arg),
6235 dconst0)),
6236 fold_build2 (NE_EXPR, type, arg,
6237 build_real (TREE_TYPE (arg),
6238 c2)));
6241 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6242 if (! HONOR_NANS (mode))
6243 return fold_build2 (code, type, arg,
6244 build_real (TREE_TYPE (arg), c2));
6246 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6247 if (lang_hooks.decls.global_bindings_p () == 0
6248 && ! CONTAINS_PLACEHOLDER_P (arg))
6250 arg = save_expr (arg);
6251 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6252 fold_build2 (GE_EXPR, type, arg,
6253 build_real (TREE_TYPE (arg),
6254 dconst0)),
6255 fold_build2 (code, type, arg,
6256 build_real (TREE_TYPE (arg),
6257 c2)));
6262 return NULL_TREE;
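/* Worked examples (editorial note, not in the original source):
   sqrt(x) > 2.0 squares the constant and folds to x > 4.0, while
   sqrt(x) < -1.0 takes the negative-constant branch and folds to
   constant false via omit_one_operand.  */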
6265 /* Subroutine of fold() that optimizes comparisons against Infinities,
6266 either +Inf or -Inf.
6268 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6269 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6270 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6272 The function returns the constant folded tree if a simplification
6273 can be made, and NULL_TREE otherwise. */
6275 static tree
6276 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6278 enum machine_mode mode;
6279 REAL_VALUE_TYPE max;
6280 tree temp;
6281 bool neg;
6283 mode = TYPE_MODE (TREE_TYPE (arg0));
6285 /* For negative infinity swap the sense of the comparison. */
6286 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6287 if (neg)
6288 code = swap_tree_comparison (code);
6290 switch (code)
6292 case GT_EXPR:
6293       /* x > +Inf is always false, if we ignore sNaNs.  */
6294 if (HONOR_SNANS (mode))
6295 return NULL_TREE;
6296 return omit_one_operand (type, integer_zero_node, arg0);
6298 case LE_EXPR:
6299       /* x <= +Inf is always true, if we don't care about NaNs.  */
6300 if (! HONOR_NANS (mode))
6301 return omit_one_operand (type, integer_one_node, arg0);
6303       /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6304 if (lang_hooks.decls.global_bindings_p () == 0
6305 && ! CONTAINS_PLACEHOLDER_P (arg0))
6307 arg0 = save_expr (arg0);
6308 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6310 break;
6312 case EQ_EXPR:
6313 case GE_EXPR:
6314 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6315 real_maxval (&max, neg, mode);
6316 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6317 arg0, build_real (TREE_TYPE (arg0), max));
6319 case LT_EXPR:
6320 /* x < +Inf is always equal to x <= DBL_MAX. */
6321 real_maxval (&max, neg, mode);
6322 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6323 arg0, build_real (TREE_TYPE (arg0), max));
6325 case NE_EXPR:
6326 /* x != +Inf is always equal to !(x > DBL_MAX). */
6327 real_maxval (&max, neg, mode);
6328 if (! HONOR_NANS (mode))
6329 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6332 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6333 arg0, build_real (TREE_TYPE (arg0), max));
6334 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6336 default:
6337 break;
6340 return NULL_TREE;
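/* Example (editorial note, not in the original source): for double,
   x < +Inf folds to x <= DBL_MAX and x >= +Inf to x > DBL_MAX; a
   comparison against -Inf first has its sense swapped above.  */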
6343 /* Subroutine of fold() that optimizes comparisons of a division by
6344 a nonzero integer constant against an integer constant, i.e.
6345 X/C1 op C2.
6347 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6348 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6349    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6351 The function returns the constant folded tree if a simplification
6352 can be made, and NULL_TREE otherwise. */
6354 static tree
6355 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6357 tree prod, tmp, hi, lo;
6358 tree arg00 = TREE_OPERAND (arg0, 0);
6359 tree arg01 = TREE_OPERAND (arg0, 1);
6360 unsigned HOST_WIDE_INT lpart;
6361 HOST_WIDE_INT hpart;
6362 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6363 bool neg_overflow;
6364 int overflow;
6366 /* We have to do this the hard way to detect unsigned overflow.
6367 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6368 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6369 TREE_INT_CST_HIGH (arg01),
6370 TREE_INT_CST_LOW (arg1),
6371 TREE_INT_CST_HIGH (arg1),
6372 &lpart, &hpart, unsigned_p);
6373 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6374 -1, overflow);
6375 neg_overflow = false;
6377 if (unsigned_p)
6379 tmp = int_const_binop (MINUS_EXPR, arg01,
6380 build_int_cst (TREE_TYPE (arg01), 1), 0);
6381 lo = prod;
6383 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6384 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6385 TREE_INT_CST_HIGH (prod),
6386 TREE_INT_CST_LOW (tmp),
6387 TREE_INT_CST_HIGH (tmp),
6388 &lpart, &hpart, unsigned_p);
6389 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6390 -1, overflow | TREE_OVERFLOW (prod));
6392 else if (tree_int_cst_sgn (arg01) >= 0)
6394 tmp = int_const_binop (MINUS_EXPR, arg01,
6395 build_int_cst (TREE_TYPE (arg01), 1), 0);
6396 switch (tree_int_cst_sgn (arg1))
6398 case -1:
6399 neg_overflow = true;
6400 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6401 hi = prod;
6402 break;
6404 case 0:
6405 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6406 hi = tmp;
6407 break;
6409 case 1:
6410 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6411 lo = prod;
6412 break;
6414 default:
6415 gcc_unreachable ();
6418 else
6420 /* A negative divisor reverses the relational operators. */
6421 code = swap_tree_comparison (code);
6423 tmp = int_const_binop (PLUS_EXPR, arg01,
6424 build_int_cst (TREE_TYPE (arg01), 1), 0);
6425 switch (tree_int_cst_sgn (arg1))
6427 case -1:
6428 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6429 lo = prod;
6430 break;
6432 case 0:
6433 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6434 lo = tmp;
6435 break;
6437 case 1:
6438 neg_overflow = true;
6439 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6440 hi = prod;
6441 break;
6443 default:
6444 gcc_unreachable ();
6448 switch (code)
6450 case EQ_EXPR:
6451 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6452 return omit_one_operand (type, integer_zero_node, arg00);
6453 if (TREE_OVERFLOW (hi))
6454 return fold_build2 (GE_EXPR, type, arg00, lo);
6455 if (TREE_OVERFLOW (lo))
6456 return fold_build2 (LE_EXPR, type, arg00, hi);
6457 return build_range_check (type, arg00, 1, lo, hi);
6459 case NE_EXPR:
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand (type, integer_one_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2 (LT_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2 (GT_EXPR, type, arg00, hi);
6466 return build_range_check (type, arg00, 0, lo, hi);
6468 case LT_EXPR:
6469 if (TREE_OVERFLOW (lo))
6471 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6472 return omit_one_operand (type, tmp, arg00);
6474 return fold_build2 (LT_EXPR, type, arg00, lo);
6476 case LE_EXPR:
6477 if (TREE_OVERFLOW (hi))
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand (type, tmp, arg00);
6482 return fold_build2 (LE_EXPR, type, arg00, hi);
6484 case GT_EXPR:
6485 if (TREE_OVERFLOW (hi))
6487 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6488 return omit_one_operand (type, tmp, arg00);
6490 return fold_build2 (GT_EXPR, type, arg00, hi);
6492 case GE_EXPR:
6493 if (TREE_OVERFLOW (lo))
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand (type, tmp, arg00);
6498 return fold_build2 (GE_EXPR, type, arg00, lo);
6500 default:
6501 break;
6504 return NULL_TREE;
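/* Worked example (editorial note, not in the original source): for
   signed X, X / 4 == 3 gives prod = 12 and tmp = 3, hence lo = 12
   and hi = 15, so the comparison folds to the range check
   12 <= X && X <= 15 built by build_range_check.  */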
6508 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6509 equality/inequality test, then return a simplified form of the test
6510    using a sign test.  Otherwise return NULL.  RESULT_TYPE is the desired
6511 result type. */
6513 static tree
6514 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6515 tree result_type)
6517 /* If this is testing a single bit, we can optimize the test. */
6518 if ((code == NE_EXPR || code == EQ_EXPR)
6519 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6522 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6523 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6524 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6526 if (arg00 != NULL_TREE
6527 /* This is only a win if casting to a signed type is cheap,
6528 i.e. when arg00's type is not a partial mode. */
6529 && TYPE_PRECISION (TREE_TYPE (arg00))
6530 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6532 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6533 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6534 result_type, fold_convert (stype, arg00),
6535 build_int_cst (stype, 0));
6539 return NULL_TREE;
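/* Example (editorial note, not in the original source): for a 32-bit
   signed A, (A & 0x80000000) != 0 tests exactly the sign bit and
   folds to A < 0; the == 0 form folds to A >= 0.  */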
6542 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6543 equality/inequality test, then return a simplified form of
6544 the test using shifts and logical operations. Otherwise return
6545    NULL.  RESULT_TYPE is the desired result type.  */
6547 tree
6548 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6549 tree result_type)
6551 /* If this is testing a single bit, we can optimize the test. */
6552 if ((code == NE_EXPR || code == EQ_EXPR)
6553 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6554 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6556 tree inner = TREE_OPERAND (arg0, 0);
6557 tree type = TREE_TYPE (arg0);
6558 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6559 enum machine_mode operand_mode = TYPE_MODE (type);
6560 int ops_unsigned;
6561 tree signed_type, unsigned_type, intermediate_type;
6562 tree tem, one;
6564 /* First, see if we can fold the single bit test into a sign-bit
6565 test. */
6566 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6567 result_type);
6568 if (tem)
6569 return tem;
6571 /* Otherwise we have (A & C) != 0 where C is a single bit,
6572 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6573 Similarly for (A & C) == 0. */
6575 /* If INNER is a right shift of a constant and it plus BITNUM does
6576 not overflow, adjust BITNUM and INNER. */
6577 if (TREE_CODE (inner) == RSHIFT_EXPR
6578 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6579 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6580 && bitnum < TYPE_PRECISION (type)
6581 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6582 bitnum - TYPE_PRECISION (type)))
6584 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6585 inner = TREE_OPERAND (inner, 0);
6588 /* If we are going to be able to omit the AND below, we must do our
6589 operations as unsigned. If we must use the AND, we have a choice.
6590 Normally unsigned is faster, but for some machines signed is. */
6591 #ifdef LOAD_EXTEND_OP
6592 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6593 && !flag_syntax_only) ? 0 : 1;
6594 #else
6595 ops_unsigned = 1;
6596 #endif
6598 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6599 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6600 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6601 inner = fold_convert (intermediate_type, inner);
6603 if (bitnum != 0)
6604 inner = build2 (RSHIFT_EXPR, intermediate_type,
6605 inner, size_int (bitnum));
6607 one = build_int_cst (intermediate_type, 1);
6609 if (code == EQ_EXPR)
6610 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6612 /* Put the AND last so it can combine with more things. */
6613 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6615 /* Make sure to return the proper type. */
6616 inner = fold_convert (result_type, inner);
6618 return inner;
6620 return NULL_TREE;
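/* Example (editorial note, not in the original source): (A & 8) != 0
   becomes (A >> 3) & 1 computed in the intermediate type chosen
   above, and the == 0 form additionally XORs the shifted bit with 1
   before the final AND.  */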
6623 /* Check whether we are allowed to reorder operands arg0 and arg1,
6624 such that the evaluation of arg1 occurs before arg0. */
6626 static bool
6627 reorder_operands_p (tree arg0, tree arg1)
6629 if (! flag_evaluation_order)
6630 return true;
6631 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6632 return true;
6633 return ! TREE_SIDE_EFFECTS (arg0)
6634 && ! TREE_SIDE_EFFECTS (arg1);
6637 /* Test whether it is preferable to swap two operands, ARG0 and
6638 ARG1, for example because ARG0 is an integer constant and ARG1
6639 isn't. If REORDER is true, only recommend swapping if we can
6640 evaluate the operands in reverse order. */
6642 bool
6643 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6645 STRIP_SIGN_NOPS (arg0);
6646 STRIP_SIGN_NOPS (arg1);
6648 if (TREE_CODE (arg1) == INTEGER_CST)
6649 return 0;
6650 if (TREE_CODE (arg0) == INTEGER_CST)
6651 return 1;
6653 if (TREE_CODE (arg1) == REAL_CST)
6654 return 0;
6655 if (TREE_CODE (arg0) == REAL_CST)
6656 return 1;
6658 if (TREE_CODE (arg1) == COMPLEX_CST)
6659 return 0;
6660 if (TREE_CODE (arg0) == COMPLEX_CST)
6661 return 1;
6663 if (TREE_CONSTANT (arg1))
6664 return 0;
6665 if (TREE_CONSTANT (arg0))
6666 return 1;
6668 if (optimize_size)
6669 return 0;
6671 if (reorder && flag_evaluation_order
6672 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6673 return 0;
6675   /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6676 for commutative and comparison operators. Ensuring a canonical
6677 form allows the optimizers to find additional redundancies without
6678 having to explicitly check for both orderings. */
6679 if (TREE_CODE (arg0) == SSA_NAME
6680 && TREE_CODE (arg1) == SSA_NAME
6681 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6682 return 1;
6684 /* Put SSA_NAMEs last. */
6685 if (TREE_CODE (arg1) == SSA_NAME)
6686 return 0;
6687 if (TREE_CODE (arg0) == SSA_NAME)
6688 return 1;
6690 /* Put variables last. */
6691 if (DECL_P (arg1))
6692 return 0;
6693 if (DECL_P (arg0))
6694 return 1;
6696 return 0;
6699 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6700 ARG0 is extended to a wider type. */
6702 static tree
6703 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6705 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6706 tree arg1_unw;
6707 tree shorter_type, outer_type;
6708 tree min, max;
6709 bool above, below;
6711 if (arg0_unw == arg0)
6712 return NULL_TREE;
6713 shorter_type = TREE_TYPE (arg0_unw);
6715 #ifdef HAVE_canonicalize_funcptr_for_compare
6716 /* Disable this optimization if we're casting a function pointer
6717 type on targets that require function pointer canonicalization. */
6718 if (HAVE_canonicalize_funcptr_for_compare
6719 && TREE_CODE (shorter_type) == POINTER_TYPE
6720 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6721 return NULL_TREE;
6722 #endif
6724 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6725 return NULL_TREE;
6727 arg1_unw = get_unwidened (arg1, shorter_type);
6729 /* If possible, express the comparison in the shorter mode. */
6730 if ((code == EQ_EXPR || code == NE_EXPR
6731 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6732 && (TREE_TYPE (arg1_unw) == shorter_type
6733 || (TREE_CODE (arg1_unw) == INTEGER_CST
6734 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6735 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6736 && int_fits_type_p (arg1_unw, shorter_type))))
6737 return fold_build2 (code, type, arg0_unw,
6738 fold_convert (shorter_type, arg1_unw));
6740 if (TREE_CODE (arg1_unw) != INTEGER_CST
6741 || TREE_CODE (shorter_type) != INTEGER_TYPE
6742 || !int_fits_type_p (arg1_unw, shorter_type))
6743 return NULL_TREE;
6745   /* If we are comparing with an integer that does not fit into the range
6746 of the shorter type, the result is known. */
6747 outer_type = TREE_TYPE (arg1_unw);
6748 min = lower_bound_in_type (outer_type, shorter_type);
6749 max = upper_bound_in_type (outer_type, shorter_type);
6751 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6752 max, arg1_unw));
6753 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6754 arg1_unw, min));
6756 switch (code)
6758 case EQ_EXPR:
6759 if (above || below)
6760 return omit_one_operand (type, integer_zero_node, arg0);
6761 break;
6763 case NE_EXPR:
6764 if (above || below)
6765 return omit_one_operand (type, integer_one_node, arg0);
6766 break;
6768 case LT_EXPR:
6769 case LE_EXPR:
6770 if (above)
6771 return omit_one_operand (type, integer_one_node, arg0);
6772 else if (below)
6773 return omit_one_operand (type, integer_zero_node, arg0);
6775 case GT_EXPR:
6776 case GE_EXPR:
6777 if (above)
6778 return omit_one_operand (type, integer_zero_node, arg0);
6779 else if (below)
6780 return omit_one_operand (type, integer_one_node, arg0);
6782 default:
6783 break;
6786 return NULL_TREE;
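/* Example (editorial note, not in the original source): if S has
   type short, (int) S < 100000 is known at compile time because
   100000 is above the upper bound of short, so the LT_EXPR case
   folds the comparison to constant true.  */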
6789 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6790 ARG0 just the signedness is changed. */
6792 static tree
6793 fold_sign_changed_comparison (enum tree_code code, tree type,
6794 tree arg0, tree arg1)
6796 tree arg0_inner;
6797 tree inner_type, outer_type;
6799 if (TREE_CODE (arg0) != NOP_EXPR
6800 && TREE_CODE (arg0) != CONVERT_EXPR)
6801 return NULL_TREE;
6803 outer_type = TREE_TYPE (arg0);
6804 arg0_inner = TREE_OPERAND (arg0, 0);
6805 inner_type = TREE_TYPE (arg0_inner);
6807 #ifdef HAVE_canonicalize_funcptr_for_compare
6808 /* Disable this optimization if we're casting a function pointer
6809 type on targets that require function pointer canonicalization. */
6810 if (HAVE_canonicalize_funcptr_for_compare
6811 && TREE_CODE (inner_type) == POINTER_TYPE
6812 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6813 return NULL_TREE;
6814 #endif
6816 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6817 return NULL_TREE;
6819 if (TREE_CODE (arg1) != INTEGER_CST
6820 && !((TREE_CODE (arg1) == NOP_EXPR
6821 || TREE_CODE (arg1) == CONVERT_EXPR)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6823 return NULL_TREE;
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 && code != NE_EXPR
6827 && code != EQ_EXPR)
6828 return NULL_TREE;
6830 if (TREE_CODE (arg1) == INTEGER_CST)
6831 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6832 TREE_INT_CST_HIGH (arg1), 0,
6833 TREE_OVERFLOW (arg1));
6834 else
6835 arg1 = fold_convert (inner_type, arg1);
6837 return fold_build2 (code, type, arg0_inner, arg1);
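/* Example (editorial note, not in the original source): for unsigned
   int U, (int) U == 5 folds to U == 5 with the constant re-fitted to
   unsigned int, since equality is unaffected by the signedness
   change.  */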
6840 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6841    the step of the array.  Reconstructs s and delta in the case of s * delta
6842    being an integer constant (and thus already folded).
6843    ADDR is the address.  OP1 is the multiplicative expression.
6844 If the function succeeds, the new address expression is returned. Otherwise
6845 NULL_TREE is returned. */
6847 static tree
6848 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6850 tree s, delta, step;
6851 tree ref = TREE_OPERAND (addr, 0), pref;
6852 tree ret, pos;
6853 tree itype;
6854 bool mdim = false;
6856 /* Canonicalize op1 into a possibly non-constant delta
6857 and an INTEGER_CST s. */
6858 if (TREE_CODE (op1) == MULT_EXPR)
6860 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6862 STRIP_NOPS (arg0);
6863 STRIP_NOPS (arg1);
6865 if (TREE_CODE (arg0) == INTEGER_CST)
6867 s = arg0;
6868 delta = arg1;
6870 else if (TREE_CODE (arg1) == INTEGER_CST)
6872 s = arg1;
6873 delta = arg0;
6875 else
6876 return NULL_TREE;
6878 else if (TREE_CODE (op1) == INTEGER_CST)
6880 delta = op1;
6881 s = NULL_TREE;
6883 else
6885       /* Treat op1 as delta * 1.  */
6886 delta = op1;
6887 s = integer_one_node;
6890 for (;; ref = TREE_OPERAND (ref, 0))
6892 if (TREE_CODE (ref) == ARRAY_REF)
6894 /* Remember if this was a multi-dimensional array. */
6895 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6896 mdim = true;
6898 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6899 if (! itype)
6900 continue;
6902 step = array_ref_element_size (ref);
6903 if (TREE_CODE (step) != INTEGER_CST)
6904 continue;
6906 if (s)
6908 if (! tree_int_cst_equal (step, s))
6909 continue;
6911 else
6913 	      /* Check whether delta is a multiple of step.  */
6914 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6915 if (! tmp)
6916 continue;
6917 delta = tmp;
6920 /* Only fold here if we can verify we do not overflow one
6921 dimension of a multi-dimensional array. */
6922 if (mdim)
6924 tree tmp;
6926 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6927 || !INTEGRAL_TYPE_P (itype)
6928 || !TYPE_MAX_VALUE (itype)
6929 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6930 continue;
6932 tmp = fold_binary (code, itype,
6933 fold_convert (itype,
6934 TREE_OPERAND (ref, 1)),
6935 fold_convert (itype, delta));
6936 if (!tmp
6937 || TREE_CODE (tmp) != INTEGER_CST
6938 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6939 continue;
6942 break;
6944 else
6945 mdim = false;
6947 if (!handled_component_p (ref))
6948 return NULL_TREE;
6951   /* We found a suitable array reference.  So copy everything up to it,
6952 and replace the index. */
6954 pref = TREE_OPERAND (addr, 0);
6955 ret = copy_node (pref);
6956 pos = ret;
6958 while (pref != ref)
6960 pref = TREE_OPERAND (pref, 0);
6961 TREE_OPERAND (pos, 0) = copy_node (pref);
6962 pos = TREE_OPERAND (pos, 0);
6965 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6966 fold_convert (itype,
6967 TREE_OPERAND (pos, 1)),
6968 fold_convert (itype, delta));
6970 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
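/* Example (editorial note, not in the original source): with
   int a[100] (4-byte elements), &a[i] + j * 4 matches step s == 4
   and folds to &a[i + j]; for a multi-dimensional array the fold is
   only performed when the new constant index provably stays within
   the dimension's bounds.  */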
6974 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6975 means A >= Y && A != MAX, but in this case we know that
6976 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6978 static tree
6979 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6981 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6983 if (TREE_CODE (bound) == LT_EXPR)
6984 a = TREE_OPERAND (bound, 0);
6985 else if (TREE_CODE (bound) == GT_EXPR)
6986 a = TREE_OPERAND (bound, 1);
6987 else
6988 return NULL_TREE;
6990 typea = TREE_TYPE (a);
6991 if (!INTEGRAL_TYPE_P (typea)
6992 && !POINTER_TYPE_P (typea))
6993 return NULL_TREE;
6995 if (TREE_CODE (ineq) == LT_EXPR)
6997 a1 = TREE_OPERAND (ineq, 1);
6998 y = TREE_OPERAND (ineq, 0);
7000 else if (TREE_CODE (ineq) == GT_EXPR)
7002 a1 = TREE_OPERAND (ineq, 0);
7003 y = TREE_OPERAND (ineq, 1);
7005 else
7006 return NULL_TREE;
7008 if (TREE_TYPE (a1) != typea)
7009 return NULL_TREE;
7011 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7012 if (!integer_onep (diff))
7013 return NULL_TREE;
7015 return fold_build2 (GE_EXPR, type, a, y);
7018 /* Fold a sum or difference of at least one multiplication.
7019 Returns the folded tree or NULL if no simplification could be made. */
7021 static tree
7022 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7024 tree arg00, arg01, arg10, arg11;
7025 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7027 /* (A * C) +- (B * C) -> (A+-B) * C.
7028 (A * C) +- A -> A * (C+-1).
7029 We are most concerned about the case where C is a constant,
7030 but other combinations show up during loop reduction. Since
7031 it is not difficult, try all four possibilities. */
7033 if (TREE_CODE (arg0) == MULT_EXPR)
7035 arg00 = TREE_OPERAND (arg0, 0);
7036 arg01 = TREE_OPERAND (arg0, 1);
7038 else
7040 arg00 = arg0;
7041 arg01 = build_one_cst (type);
7043 if (TREE_CODE (arg1) == MULT_EXPR)
7045 arg10 = TREE_OPERAND (arg1, 0);
7046 arg11 = TREE_OPERAND (arg1, 1);
7048 else
7050 arg10 = arg1;
7051 arg11 = build_one_cst (type);
7053 same = NULL_TREE;
7055 if (operand_equal_p (arg01, arg11, 0))
7056 same = arg01, alt0 = arg00, alt1 = arg10;
7057 else if (operand_equal_p (arg00, arg10, 0))
7058 same = arg00, alt0 = arg01, alt1 = arg11;
7059 else if (operand_equal_p (arg00, arg11, 0))
7060 same = arg00, alt0 = arg01, alt1 = arg10;
7061 else if (operand_equal_p (arg01, arg10, 0))
7062 same = arg01, alt0 = arg00, alt1 = arg11;
7064 /* No identical multiplicands; see if we can find a common
7065 power-of-two factor in non-power-of-two multiplies. This
7066 can help in multi-dimensional array access. */
7067 else if (host_integerp (arg01, 0)
7068 && host_integerp (arg11, 0))
7070 HOST_WIDE_INT int01, int11, tmp;
7071 bool swap = false;
7072 tree maybe_same;
7073 int01 = TREE_INT_CST_LOW (arg01);
7074 int11 = TREE_INT_CST_LOW (arg11);
7076 /* Move min of absolute values to int11. */
7077 if ((int01 >= 0 ? int01 : -int01)
7078 < (int11 >= 0 ? int11 : -int11))
7080 tmp = int01, int01 = int11, int11 = tmp;
7081 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7082 maybe_same = arg01;
7083 swap = true;
7085 else
7086 maybe_same = arg11;
7088 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7090 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7091 build_int_cst (TREE_TYPE (arg00),
7092 int01 / int11));
7093 alt1 = arg10;
7094 same = maybe_same;
7095 if (swap)
7096 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7100 if (same)
7101 return fold_build2 (MULT_EXPR, type,
7102 fold_build2 (code, type,
7103 fold_convert (type, alt0),
7104 fold_convert (type, alt1)),
7105 fold_convert (type, same));
7107 return NULL_TREE;
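/* Worked example (editorial note, not in the original source):
   A * 12 + B * 4 has no identical multiplicand, but 4 is a power of
   two dividing 12, so the sum is rewritten as (A * 3 + B) * 4.  */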
7110 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7111 specified by EXPR into the buffer PTR of length LEN bytes.
7112 Return the number of bytes placed in the buffer, or zero
7113 upon failure. */
7115 static int
7116 native_encode_int (tree expr, unsigned char *ptr, int len)
7118 tree type = TREE_TYPE (expr);
7119 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7120 int byte, offset, word, words;
7121 unsigned char value;
7123 if (total_bytes > len)
7124 return 0;
7125 words = total_bytes / UNITS_PER_WORD;
7127 for (byte = 0; byte < total_bytes; byte++)
7129 int bitpos = byte * BITS_PER_UNIT;
7130 if (bitpos < HOST_BITS_PER_WIDE_INT)
7131 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7132 else
7133 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7134 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7136 if (total_bytes > UNITS_PER_WORD)
7138 word = byte / UNITS_PER_WORD;
7139 if (WORDS_BIG_ENDIAN)
7140 word = (words - 1) - word;
7141 offset = word * UNITS_PER_WORD;
7142 if (BYTES_BIG_ENDIAN)
7143 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7144 else
7145 offset += byte % UNITS_PER_WORD;
7147 else
7148 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7149 ptr[offset] = value;
7151 return total_bytes;
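/* Example (editorial note, not in the original source): encoding the
   16-bit constant 0x0102 stores the bytes {0x02, 0x01} on a
   little-endian target and {0x01, 0x02} on a big-endian one, and
   returns 2.  */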
7155 /* Subroutine of native_encode_expr. Encode the REAL_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7158 upon failure. */
7160 static int
7161 native_encode_real (tree expr, unsigned char *ptr, int len)
7163 tree type = TREE_TYPE (expr);
7164 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7165 int byte, offset, word, words;
7166 unsigned char value;
7168 /* There are always 32 bits in each long, no matter the size of
7169      the host's long.  We handle floating-point representations with
7170 up to 192 bits. */
7171 long tmp[6];
7173 if (total_bytes > len)
7174 return 0;
7175 words = total_bytes / UNITS_PER_WORD;
7177 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7179 for (byte = 0; byte < total_bytes; byte++)
7181 int bitpos = byte * BITS_PER_UNIT;
7182 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7184 if (total_bytes > UNITS_PER_WORD)
7186 word = byte / UNITS_PER_WORD;
7187 if (FLOAT_WORDS_BIG_ENDIAN)
7188 word = (words - 1) - word;
7189 offset = word * UNITS_PER_WORD;
7190 if (BYTES_BIG_ENDIAN)
7191 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7192 else
7193 offset += byte % UNITS_PER_WORD;
7195 else
7196 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7197 ptr[offset] = value;
7199 return total_bytes;
7202 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7203 specified by EXPR into the buffer PTR of length LEN bytes.
7204 Return the number of bytes placed in the buffer, or zero
7205 upon failure. */
7207 static int
7208 native_encode_complex (tree expr, unsigned char *ptr, int len)
7210 int rsize, isize;
7211 tree part;
7213 part = TREE_REALPART (expr);
7214 rsize = native_encode_expr (part, ptr, len);
7215 if (rsize == 0)
7216 return 0;
7217 part = TREE_IMAGPART (expr);
7218 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7219 if (isize != rsize)
7220 return 0;
7221 return rsize + isize;
7225 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7226 specified by EXPR into the buffer PTR of length LEN bytes.
7227 Return the number of bytes placed in the buffer, or zero
7228 upon failure. */
7230 static int
7231 native_encode_vector (tree expr, unsigned char *ptr, int len)
7233 int i, size, offset, count;
7234 tree itype, elem, elements;
7236 offset = 0;
7237 elements = TREE_VECTOR_CST_ELTS (expr);
7238 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7239 itype = TREE_TYPE (TREE_TYPE (expr));
7240 size = GET_MODE_SIZE (TYPE_MODE (itype));
7241 for (i = 0; i < count; i++)
7243 if (elements)
7245 elem = TREE_VALUE (elements);
7246 elements = TREE_CHAIN (elements);
7248 else
7249 elem = NULL_TREE;
7251 if (elem)
7253 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7254 return 0;
7256 else
7258 if (offset + size > len)
7259 return 0;
7260 memset (ptr+offset, 0, size);
7262 offset += size;
7264 return offset;
7268 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7269 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7270 buffer PTR of length LEN bytes. Return the number of bytes
7271 placed in the buffer, or zero upon failure. */
7273 static int
7274 native_encode_expr (tree expr, unsigned char *ptr, int len)
7276 switch (TREE_CODE (expr))
7278 case INTEGER_CST:
7279 return native_encode_int (expr, ptr, len);
7281 case REAL_CST:
7282 return native_encode_real (expr, ptr, len);
7284 case COMPLEX_CST:
7285 return native_encode_complex (expr, ptr, len);
7287 case VECTOR_CST:
7288 return native_encode_vector (expr, ptr, len);
7290 default:
7291 return 0;
7296 /* Subroutine of native_interpret_expr. Interpret the contents of
7297 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7298 If the buffer cannot be interpreted, return NULL_TREE. */
7300 static tree
7301 native_interpret_int (tree type, unsigned char *ptr, int len)
7303 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7304 int byte, offset, word, words;
7305 unsigned char value;
7306   unsigned HOST_WIDE_INT lo = 0;
7307 HOST_WIDE_INT hi = 0;
7309 if (total_bytes > len)
7310 return NULL_TREE;
7311 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7312 return NULL_TREE;
7313 words = total_bytes / UNITS_PER_WORD;
7315 for (byte = 0; byte < total_bytes; byte++)
7317 int bitpos = byte * BITS_PER_UNIT;
7318 if (total_bytes > UNITS_PER_WORD)
7320 word = byte / UNITS_PER_WORD;
7321 if (WORDS_BIG_ENDIAN)
7322 word = (words - 1) - word;
7323 offset = word * UNITS_PER_WORD;
7324 if (BYTES_BIG_ENDIAN)
7325 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7326 else
7327 offset += byte % UNITS_PER_WORD;
7329 else
7330 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7331 value = ptr[offset];
7333 if (bitpos < HOST_BITS_PER_WIDE_INT)
7334 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7335 else
7336 hi |= (unsigned HOST_WIDE_INT) value
7337 << (bitpos - HOST_BITS_PER_WIDE_INT);
7340 return build_int_cst_wide_type (type, lo, hi);
7344 /* Subroutine of native_interpret_expr. Interpret the contents of
7345 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7346 If the buffer cannot be interpreted, return NULL_TREE. */
7348 static tree
7349 native_interpret_real (tree type, unsigned char *ptr, int len)
7351 enum machine_mode mode = TYPE_MODE (type);
7352 int total_bytes = GET_MODE_SIZE (mode);
7353 int byte, offset, word, words;
7354 unsigned char value;
7355 /* There are always 32 bits in each long, no matter the size of
7356      the host's long.  We handle floating-point representations with
7357 up to 192 bits. */
7358 REAL_VALUE_TYPE r;
7359 long tmp[6];
7362 if (total_bytes > len || total_bytes > 24)
7363 return NULL_TREE;
7364 words = total_bytes / UNITS_PER_WORD;
7366 memset (tmp, 0, sizeof (tmp));
7367 for (byte = 0; byte < total_bytes; byte++)
7369 int bitpos = byte * BITS_PER_UNIT;
7370 if (total_bytes > UNITS_PER_WORD)
7372 word = byte / UNITS_PER_WORD;
7373 if (FLOAT_WORDS_BIG_ENDIAN)
7374 word = (words - 1) - word;
7375 offset = word * UNITS_PER_WORD;
7376 if (BYTES_BIG_ENDIAN)
7377 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7378 else
7379 offset += byte % UNITS_PER_WORD;
7381 else
7382 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7383 value = ptr[offset];
7385 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7388 real_from_target (&r, tmp, mode);
7389 return build_real (type, r);
7393 /* Subroutine of native_interpret_expr. Interpret the contents of
7394 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7395 If the buffer cannot be interpreted, return NULL_TREE. */
7397 static tree
7398 native_interpret_complex (tree type, unsigned char *ptr, int len)
7400 tree etype, rpart, ipart;
7401 int size;
7403 etype = TREE_TYPE (type);
7404 size = GET_MODE_SIZE (TYPE_MODE (etype));
7405 if (size * 2 > len)
7406 return NULL_TREE;
7407 rpart = native_interpret_expr (etype, ptr, size);
7408 if (!rpart)
7409 return NULL_TREE;
7410 ipart = native_interpret_expr (etype, ptr+size, size);
7411 if (!ipart)
7412 return NULL_TREE;
7413 return build_complex (type, rpart, ipart);
7417 /* Subroutine of native_interpret_expr. Interpret the contents of
7418 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7419 If the buffer cannot be interpreted, return NULL_TREE. */
7421 static tree
7422 native_interpret_vector (tree type, unsigned char *ptr, int len)
7424 tree etype, elem, elements;
7425 int i, size, count;
7427 etype = TREE_TYPE (type);
7428 size = GET_MODE_SIZE (TYPE_MODE (etype));
7429 count = TYPE_VECTOR_SUBPARTS (type);
7430 if (size * count > len)
7431 return NULL_TREE;
7433 elements = NULL_TREE;
7434 for (i = count - 1; i >= 0; i--)
7436 elem = native_interpret_expr (etype, ptr+(i*size), size);
7437 if (!elem)
7438 return NULL_TREE;
7439 elements = tree_cons (NULL_TREE, elem, elements);
7441 return build_vector (type, elements);
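/* The loop conses from the last element down to the first, so the
   resulting TREE_LIST is in ascending element order. */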
7445 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7446 the buffer PTR of length LEN as a constant of type TYPE. For
7447 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7448 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7449 return NULL_TREE. */
7451 static tree
7452 native_interpret_expr (tree type, unsigned char *ptr, int len)
7454 switch (TREE_CODE (type))
7456 case INTEGER_TYPE:
7457 case ENUMERAL_TYPE:
7458 case BOOLEAN_TYPE:
7459 return native_interpret_int (type, ptr, len);
7461 case REAL_TYPE:
7462 return native_interpret_real (type, ptr, len);
7464 case COMPLEX_TYPE:
7465 return native_interpret_complex (type, ptr, len);
7467 case VECTOR_TYPE:
7468 return native_interpret_vector (type, ptr, len);
7470 default:
7471 return NULL_TREE;
7476 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7477 TYPE at compile-time. If we're unable to perform the conversion
7478 return NULL_TREE. */
7480 static tree
7481 fold_view_convert_expr (tree type, tree expr)
7483 /* We support up to 512-bit values (for V8DFmode). */
7484 unsigned char buffer[64];
7485 int len;
7487 /* Check that the host and target are sane. */
7488 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7489 return NULL_TREE;
7491 len = native_encode_expr (expr, buffer, sizeof (buffer));
7492 if (len == 0)
7493 return NULL_TREE;
7495 return native_interpret_expr (type, buffer, len);
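/* For example, this evaluates VIEW_CONVERT_EXPR<int>(1.0f) at
   compile time: on a little-endian IEEE target the encode/interpret
   round trip through BUFFER yields the INTEGER_CST 0x3f800000. */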
7499 /* Fold a unary expression of code CODE and type TYPE with operand
7500 OP0. Return the folded expression if folding is successful.
7501 Otherwise, return NULL_TREE. */
7503 tree
7504 fold_unary (enum tree_code code, tree type, tree op0)
7506 tree tem;
7507 tree arg0;
7508 enum tree_code_class kind = TREE_CODE_CLASS (code);
7510 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7511 && TREE_CODE_LENGTH (code) == 1);
7513 arg0 = op0;
7514 if (arg0)
7516 if (code == NOP_EXPR || code == CONVERT_EXPR
7517 || code == FLOAT_EXPR || code == ABS_EXPR)
7519 /* Don't use STRIP_NOPS, because signedness of argument type
7520 matters. */
7521 STRIP_SIGN_NOPS (arg0);
7523 else
7525 /* Strip any conversions that don't change the mode. This
7526 is safe for every expression, except for a comparison
7527 expression because its signedness is derived from its
7528 operands.
7530 Note that this is done as an internal manipulation within
7531 the constant folder, in order to find the simplest
7532 representation of the arguments so that their form can be
7533 studied. In any case, the appropriate type conversions
7534 should be put back in the tree that will get out of the
7535 constant folder. */
7536 STRIP_NOPS (arg0);
7540 if (TREE_CODE_CLASS (code) == tcc_unary)
7542 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7543 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7544 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7545 else if (TREE_CODE (arg0) == COND_EXPR)
7547 tree arg01 = TREE_OPERAND (arg0, 1);
7548 tree arg02 = TREE_OPERAND (arg0, 2);
7549 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7550 arg01 = fold_build1 (code, type, arg01);
7551 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7552 arg02 = fold_build1 (code, type, arg02);
7553 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7554 arg01, arg02);
7556 /* If this was a conversion, and all we did was to move it
7557 inside the COND_EXPR, bring it back out. But leave it if
7558 it is a conversion from integer to integer and the
7559 result precision is no wider than a word since such a
7560 conversion is cheap and may be optimized away by combine,
7561 while it couldn't if it were outside the COND_EXPR. Then return
7562 so we don't get into an infinite recursion loop taking the
7563 conversion out and then back in. */
7565 if ((code == NOP_EXPR || code == CONVERT_EXPR
7566 || code == NON_LVALUE_EXPR)
7567 && TREE_CODE (tem) == COND_EXPR
7568 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7569 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7570 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7571 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7572 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7573 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7574 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7575 && (INTEGRAL_TYPE_P
7576 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7577 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7578 || flag_syntax_only))
7579 tem = build1 (code, type,
7580 build3 (COND_EXPR,
7581 TREE_TYPE (TREE_OPERAND
7582 (TREE_OPERAND (tem, 1), 0)),
7583 TREE_OPERAND (tem, 0),
7584 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7585 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7586 return tem;
7588 else if (COMPARISON_CLASS_P (arg0))
7590 if (TREE_CODE (type) == BOOLEAN_TYPE)
7592 arg0 = copy_node (arg0);
7593 TREE_TYPE (arg0) = type;
7594 return arg0;
7596 else if (TREE_CODE (type) != INTEGER_TYPE)
7597 return fold_build3 (COND_EXPR, type, arg0,
7598 fold_build1 (code, type,
7599 integer_one_node),
7600 fold_build1 (code, type,
7601 integer_zero_node));
7605 switch (code)
7607 case NOP_EXPR:
7608 case FLOAT_EXPR:
7609 case CONVERT_EXPR:
7610 case FIX_TRUNC_EXPR:
7611 if (TREE_TYPE (op0) == type)
7612 return op0;
7614 /* If we have (type) (a CMP b) and type is an integral type, return
7615 a new expression involving the new type. */
7616 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7617 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7618 TREE_OPERAND (op0, 1));
7620 /* Handle cases of two conversions in a row. */
7621 if (TREE_CODE (op0) == NOP_EXPR
7622 || TREE_CODE (op0) == CONVERT_EXPR)
7624 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7625 tree inter_type = TREE_TYPE (op0);
7626 int inside_int = INTEGRAL_TYPE_P (inside_type);
7627 int inside_ptr = POINTER_TYPE_P (inside_type);
7628 int inside_float = FLOAT_TYPE_P (inside_type);
7629 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7630 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7631 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7632 int inter_int = INTEGRAL_TYPE_P (inter_type);
7633 int inter_ptr = POINTER_TYPE_P (inter_type);
7634 int inter_float = FLOAT_TYPE_P (inter_type);
7635 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7636 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7637 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7638 int final_int = INTEGRAL_TYPE_P (type);
7639 int final_ptr = POINTER_TYPE_P (type);
7640 int final_float = FLOAT_TYPE_P (type);
7641 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7642 unsigned int final_prec = TYPE_PRECISION (type);
7643 int final_unsignedp = TYPE_UNSIGNED (type);
7645 /* In addition to the cases of two conversions in a row
7646 handled below, if we are converting something to its own
7647 type via an object of identical or wider precision, neither
7648 conversion is needed. */
7649 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7650 && (((inter_int || inter_ptr) && final_int)
7651 || (inter_float && final_float))
7652 && inter_prec >= final_prec)
7653 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7655 /* Likewise, if the intermediate and final types are either both
7656 float or both integer, we don't need the middle conversion if
7657 it is wider than the final type and doesn't change the signedness
7658 (for integers). Avoid this if the final type is a pointer
7659 since then we sometimes need the inner conversion. Likewise if
7660 the outer has a precision not equal to the size of its mode. */
7661 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7662 || (inter_float && inside_float)
7663 || (inter_vec && inside_vec))
7664 && inter_prec >= inside_prec
7665 && (inter_float || inter_vec
7666 || inter_unsignedp == inside_unsignedp)
7667 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7668 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7669 && ! final_ptr
7670 && (! final_vec || inter_prec == inside_prec))
7671 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7673 /* If we have a sign-extension of a zero-extended value, we can
7674 replace that by a single zero-extension. */
7675 if (inside_int && inter_int && final_int
7676 && inside_prec < inter_prec && inter_prec < final_prec
7677 && inside_unsignedp && !inter_unsignedp)
7678 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7680 /* Two conversions in a row are not needed unless:
7681 - some conversion is floating-point (overstrict for now), or
7682 - some conversion is a vector (overstrict for now), or
7683 - the intermediate type is narrower than both initial and
7684 final, or
7685 - the intermediate type and innermost type differ in signedness,
7686 and the outermost type is wider than the intermediate, or
7687 - the initial type is a pointer type and the precisions of the
7688 intermediate and final types differ, or
7689 - the final type is a pointer type and the precisions of the
7690 initial and intermediate types differ, or
7691 - the final type is a pointer type and the initial type is not, or
7692 - the initial type is a pointer to an array and the final type
7693 is not. */
7694 if (! inside_float && ! inter_float && ! final_float
7695 && ! inside_vec && ! inter_vec && ! final_vec
7696 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7697 && ! (inside_int && inter_int
7698 && inter_unsignedp != inside_unsignedp
7699 && inter_prec < final_prec)
7700 && ((inter_unsignedp && inter_prec > inside_prec)
7701 == (final_unsignedp && final_prec > inter_prec))
7702 && ! (inside_ptr && inter_prec != final_prec)
7703 && ! (final_ptr && inside_prec != inter_prec)
7704 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7705 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7706 && final_ptr == inside_ptr
7707 && ! (inside_ptr
7708 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7709 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7710 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
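/* Worked examples for the rules above, assuming 8-bit chars, 16-bit
   shorts, 32-bit ints and 64-bit longs: (int) (long) X for an int X
   collapses to X, since the wider intermediate conversion changes
   nothing; and (long) (short) (unsigned char) X becomes
   (long) (unsigned char) X, replacing a zero-then-sign extension by
   a single zero extension. */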
7713 /* Handle (T *)&A.B.C for A being of type T and B and C
7714 living at offset zero. This occurs frequently in
7715 C++ upcasting and then accessing the base. */
7716 if (TREE_CODE (op0) == ADDR_EXPR
7717 && POINTER_TYPE_P (type)
7718 && handled_component_p (TREE_OPERAND (op0, 0)))
7720 HOST_WIDE_INT bitsize, bitpos;
7721 tree offset;
7722 enum machine_mode mode;
7723 int unsignedp, volatilep;
7724 tree base = TREE_OPERAND (op0, 0);
7725 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7726 &mode, &unsignedp, &volatilep, false);
7727 /* If the reference was to a (constant) zero offset, we can use
7728 the address of the base if it has the same base type
7729 as the result type. */
7730 if (! offset && bitpos == 0
7731 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7732 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7733 return fold_convert (type, build_fold_addr_expr (base));
7736 if ((TREE_CODE (op0) == MODIFY_EXPR
7737 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7738 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7739 /* Detect assigning a bitfield. */
7740 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7741 && DECL_BIT_FIELD
7742 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7744 /* Don't leave an assignment inside a conversion
7745 unless assigning a bitfield. */
7746 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7747 /* First do the assignment, then return converted constant. */
7748 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7749 TREE_NO_WARNING (tem) = 1;
7750 TREE_USED (tem) = 1;
7751 return tem;
7754 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7755 constant (if x has signed type, the sign bit cannot be set
7756 in c). This folds the extension into the BIT_AND_EXPR. */
7757 if (INTEGRAL_TYPE_P (type)
7758 && TREE_CODE (type) != BOOLEAN_TYPE
7759 && TREE_CODE (op0) == BIT_AND_EXPR
7760 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7762 tree and = op0;
7763 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7764 int change = 0;
7766 if (TYPE_UNSIGNED (TREE_TYPE (and))
7767 || (TYPE_PRECISION (type)
7768 <= TYPE_PRECISION (TREE_TYPE (and))))
7769 change = 1;
7770 else if (TYPE_PRECISION (TREE_TYPE (and1))
7771 <= HOST_BITS_PER_WIDE_INT
7772 && host_integerp (and1, 1))
7774 unsigned HOST_WIDE_INT cst;
7776 cst = tree_low_cst (and1, 1);
7777 cst &= (HOST_WIDE_INT) -1
7778 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7779 change = (cst == 0);
7780 #ifdef LOAD_EXTEND_OP
7781 if (change
7782 && !flag_syntax_only
7783 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7784 == ZERO_EXTEND))
7786 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7787 and0 = fold_convert (uns, and0);
7788 and1 = fold_convert (uns, and1);
7790 #endif
7792 if (change)
7794 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7795 TREE_INT_CST_HIGH (and1), 0,
7796 TREE_OVERFLOW (and1));
7797 return fold_build2 (BIT_AND_EXPR, type,
7798 fold_convert (type, and0), tem);
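/* E.g. for a signed char X, (unsigned int) (X & 0x7f) becomes
   (unsigned int) X & 0x7f: the mask clears the sign bit, so the
   widening conversion can be folded into the BIT_AND_EXPR. */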
7802 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7803 T2 being pointers to types of the same size. */
7804 if (POINTER_TYPE_P (type)
7805 && BINARY_CLASS_P (arg0)
7806 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7807 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7809 tree arg00 = TREE_OPERAND (arg0, 0);
7810 tree t0 = type;
7811 tree t1 = TREE_TYPE (arg00);
7812 tree tt0 = TREE_TYPE (t0);
7813 tree tt1 = TREE_TYPE (t1);
7814 tree s0 = TYPE_SIZE (tt0);
7815 tree s1 = TYPE_SIZE (tt1);
7817 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7818 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7819 TREE_OPERAND (arg0, 1));
7822 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7823 of the same precision, and X is an integer type not narrower than
7824 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7825 if (INTEGRAL_TYPE_P (type)
7826 && TREE_CODE (op0) == BIT_NOT_EXPR
7827 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7828 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7829 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7830 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7832 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7833 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7834 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7835 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
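/* E.g. (int) ~(unsigned int) X for an int X becomes ~X: all three
   precisions match, so the intermediate cast cannot change any
   bits. */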
7838 tem = fold_convert_const (code, type, arg0);
7839 return tem ? tem : NULL_TREE;
7841 case VIEW_CONVERT_EXPR:
7842 if (TREE_TYPE (op0) == type)
7843 return op0;
7844 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7845 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7846 return fold_view_convert_expr (type, op0);
7848 case NEGATE_EXPR:
7849 tem = fold_negate_expr (arg0);
7850 if (tem)
7851 return fold_convert (type, tem);
7852 return NULL_TREE;
7854 case ABS_EXPR:
7855 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7856 return fold_abs_const (arg0, type);
7857 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7858 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7859 /* Convert fabs((double)float) into (double)fabsf(float). */
7860 else if (TREE_CODE (arg0) == NOP_EXPR
7861 && TREE_CODE (type) == REAL_TYPE)
7863 tree targ0 = strip_float_extensions (arg0);
7864 if (targ0 != arg0)
7865 return fold_convert (type, fold_build1 (ABS_EXPR,
7866 TREE_TYPE (targ0),
7867 targ0));
7869 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7870 else if (TREE_CODE (arg0) == ABS_EXPR)
7871 return arg0;
7872 else if (tree_expr_nonnegative_p (arg0))
7873 return arg0;
7875 /* Strip sign ops from argument. */
7876 if (TREE_CODE (type) == REAL_TYPE)
7878 tem = fold_strip_sign_ops (arg0);
7879 if (tem)
7880 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7882 return NULL_TREE;
7884 case CONJ_EXPR:
7885 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7886 return fold_convert (type, arg0);
7887 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7889 tree itype = TREE_TYPE (type);
7890 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7891 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7892 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7894 if (TREE_CODE (arg0) == COMPLEX_CST)
7896 tree itype = TREE_TYPE (type);
7897 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7898 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7899 return build_complex (type, rpart, negate_expr (ipart));
7901 if (TREE_CODE (arg0) == CONJ_EXPR)
7902 return fold_convert (type, TREE_OPERAND (arg0, 0));
7903 return NULL_TREE;
7905 case BIT_NOT_EXPR:
7906 if (TREE_CODE (arg0) == INTEGER_CST)
7907 return fold_not_const (arg0, type);
7908 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7909 return TREE_OPERAND (arg0, 0);
7910 /* Convert ~ (-A) to A - 1. */
7911 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7912 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7913 build_int_cst (type, 1));
7914 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7915 else if (INTEGRAL_TYPE_P (type)
7916 && ((TREE_CODE (arg0) == MINUS_EXPR
7917 && integer_onep (TREE_OPERAND (arg0, 1)))
7918 || (TREE_CODE (arg0) == PLUS_EXPR
7919 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7920 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7921 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7922 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7923 && (tem = fold_unary (BIT_NOT_EXPR, type,
7924 fold_convert (type,
7925 TREE_OPERAND (arg0, 0)))))
7926 return fold_build2 (BIT_XOR_EXPR, type, tem,
7927 fold_convert (type, TREE_OPERAND (arg0, 1)));
7928 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7929 && (tem = fold_unary (BIT_NOT_EXPR, type,
7930 fold_convert (type,
7931 TREE_OPERAND (arg0, 1)))))
7932 return fold_build2 (BIT_XOR_EXPR, type,
7933 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7935 return NULL_TREE;
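/* The identities above rely on the two's complement relation
   ~A == -A - 1: e.g. ~(-X) folds to X - 1, ~(X - 1) folds to -X,
   and for the XOR rules ~(X ^ C) with a constant C folds to
   X ^ ~C. */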
7937 case TRUTH_NOT_EXPR:
7938 /* The argument to invert_truthvalue must have Boolean type. */
7939 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7940 arg0 = fold_convert (boolean_type_node, arg0);
7942 /* Note that the operand of this must be an int
7943 and its values must be 0 or 1.
7944 ("true" is a fixed value perhaps depending on the language,
7945 but we don't handle values other than 1 correctly yet.) */
7946 tem = fold_truth_not_expr (arg0);
7947 if (!tem)
7948 return NULL_TREE;
7949 return fold_convert (type, tem);
7951 case REALPART_EXPR:
7952 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7953 return fold_convert (type, arg0);
7954 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7955 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7956 TREE_OPERAND (arg0, 1));
7957 if (TREE_CODE (arg0) == COMPLEX_CST)
7958 return fold_convert (type, TREE_REALPART (arg0));
7959 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7961 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7962 tem = fold_build2 (TREE_CODE (arg0), itype,
7963 fold_build1 (REALPART_EXPR, itype,
7964 TREE_OPERAND (arg0, 0)),
7965 fold_build1 (REALPART_EXPR, itype,
7966 TREE_OPERAND (arg0, 1)));
7967 return fold_convert (type, tem);
7969 if (TREE_CODE (arg0) == CONJ_EXPR)
7971 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7972 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7973 return fold_convert (type, tem);
7975 if (TREE_CODE (arg0) == CALL_EXPR)
7977 tree fn = get_callee_fndecl (arg0);
7978 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7979 switch (DECL_FUNCTION_CODE (fn))
7981 CASE_FLT_FN (BUILT_IN_CEXPI):
7982 fn = mathfn_built_in (type, BUILT_IN_COS);
7983 if (fn)
7984 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
7985 break;
7987 default:
7988 break;
7991 return NULL_TREE;
7993 case IMAGPART_EXPR:
7994 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7995 return fold_convert (type, integer_zero_node);
7996 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7997 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7998 TREE_OPERAND (arg0, 0));
7999 if (TREE_CODE (arg0) == COMPLEX_CST)
8000 return fold_convert (type, TREE_IMAGPART (arg0));
8001 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8003 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8004 tem = fold_build2 (TREE_CODE (arg0), itype,
8005 fold_build1 (IMAGPART_EXPR, itype,
8006 TREE_OPERAND (arg0, 0)),
8007 fold_build1 (IMAGPART_EXPR, itype,
8008 TREE_OPERAND (arg0, 1)));
8009 return fold_convert (type, tem);
8011 if (TREE_CODE (arg0) == CONJ_EXPR)
8013 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8014 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8015 return fold_convert (type, negate_expr (tem));
8017 if (TREE_CODE (arg0) == CALL_EXPR)
8019 tree fn = get_callee_fndecl (arg0);
8020 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8021 switch (DECL_FUNCTION_CODE (fn))
8023 CASE_FLT_FN (BUILT_IN_CEXPI):
8024 fn = mathfn_built_in (type, BUILT_IN_SIN);
8025 if (fn)
8026 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8027 break;
8029 default:
8030 break;
8033 return NULL_TREE;
8035 default:
8036 return NULL_TREE;
8037 } /* switch (code) */
8040 /* Fold a binary expression of code CODE and type TYPE with operands
8041 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8042 Return the folded expression if folding is successful. Otherwise,
8043 return NULL_TREE. */
8045 static tree
8046 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8048 enum tree_code compl_code;
8050 if (code == MIN_EXPR)
8051 compl_code = MAX_EXPR;
8052 else if (code == MAX_EXPR)
8053 compl_code = MIN_EXPR;
8054 else
8055 gcc_unreachable ();
8057 /* MIN (MAX (a, b), b) == b. */
8058 if (TREE_CODE (op0) == compl_code
8059 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8060 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8062 /* MIN (MAX (b, a), b) == b. */
8063 if (TREE_CODE (op0) == compl_code
8064 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8065 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8066 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8068 /* MIN (a, MAX (a, b)) == a. */
8069 if (TREE_CODE (op1) == compl_code
8070 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8071 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8072 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8074 /* MIN (a, MAX (b, a)) == a. */
8075 if (TREE_CODE (op1) == compl_code
8076 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8077 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8078 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8080 return NULL_TREE;
8083 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8084 by changing CODE to reduce the magnitude of constants involved in
8085 ARG0 of the comparison.
8086 Returns a canonicalized comparison tree if a simplification was
8087 possible, otherwise returns NULL_TREE.
8088 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8089 valid if signed overflow is undefined. */
8091 static tree
8092 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8093 tree arg0, tree arg1,
8094 bool *strict_overflow_p)
8096 enum tree_code code0 = TREE_CODE (arg0);
8097 tree t, cst0 = NULL_TREE;
8098 int sgn0;
8099 bool swap = false;
8101 /* Match A +- CST code arg1 and CST code arg1. */
8102 if (!(((code0 == MINUS_EXPR
8103 || code0 == PLUS_EXPR)
8104 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8105 || code0 == INTEGER_CST))
8106 return NULL_TREE;
8108 /* Identify the constant in arg0 and its sign. */
8109 if (code0 == INTEGER_CST)
8110 cst0 = arg0;
8111 else
8112 cst0 = TREE_OPERAND (arg0, 1);
8113 sgn0 = tree_int_cst_sgn (cst0);
8115 /* Overflowed constants and zero will cause problems. */
8116 if (integer_zerop (cst0)
8117 || TREE_OVERFLOW (cst0))
8118 return NULL_TREE;
8120 /* See if we can reduce the magnitude of the constant in
8121 arg0 by changing the comparison code. */
8122 if (code0 == INTEGER_CST)
8124 /* CST <= arg1 -> CST-1 < arg1. */
8125 if (code == LE_EXPR && sgn0 == 1)
8126 code = LT_EXPR;
8127 /* -CST < arg1 -> -CST-1 <= arg1. */
8128 else if (code == LT_EXPR && sgn0 == -1)
8129 code = LE_EXPR;
8130 /* CST > arg1 -> CST-1 >= arg1. */
8131 else if (code == GT_EXPR && sgn0 == 1)
8132 code = GE_EXPR;
8133 /* -CST >= arg1 -> -CST-1 > arg1. */
8134 else if (code == GE_EXPR && sgn0 == -1)
8135 code = GT_EXPR;
8136 else
8137 return NULL_TREE;
8138 /* arg1 code' CST' might be more canonical. */
8139 swap = true;
8141 else
8143 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8144 if (code == LT_EXPR
8145 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8146 code = LE_EXPR;
8147 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8148 else if (code == GT_EXPR
8149 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8150 code = GE_EXPR;
8151 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8152 else if (code == LE_EXPR
8153 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8154 code = LT_EXPR;
8155 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8156 else if (code == GE_EXPR
8157 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8158 code = GT_EXPR;
8159 else
8160 return NULL_TREE;
8161 *strict_overflow_p = true;
8164 /* Now build the constant reduced in magnitude. */
8165 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8166 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8167 if (code0 != INTEGER_CST)
8168 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8170 /* If swapping might yield a more canonical form, do so. */
8171 if (swap)
8172 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8173 else
8174 return fold_build2 (code, type, t, arg1);
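/* Worked example, for signed X with undefined overflow: X - 5 < Y
   is canonicalized to X - 4 <= Y by the A - CST rule, while the
   constant-first form 5 <= Y first becomes 4 < Y and is then
   swapped to Y > 4. */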
8177 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8178 overflow further. Try to decrease the magnitude of constants involved
8179 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8180 and put sole constants at the second argument position.
8181 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8183 static tree
8184 maybe_canonicalize_comparison (enum tree_code code, tree type,
8185 tree arg0, tree arg1)
8187 tree t;
8188 bool strict_overflow_p;
8189 const char * const warnmsg = G_("assuming signed overflow does not occur "
8190 "when reducing constant in comparison");
8192 /* In principle pointers also have undefined overflow behavior,
8193 but that causes problems elsewhere. */
8194 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8195 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8196 return NULL_TREE;
8198 /* Try canonicalization by simplifying arg0. */
8199 strict_overflow_p = false;
8200 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8201 &strict_overflow_p);
8202 if (t)
8204 if (strict_overflow_p)
8205 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8206 return t;
8209 /* Try canonicalization by simplifying arg1 using the swapped
8210 comparison. */
8211 code = swap_tree_comparison (code);
8212 strict_overflow_p = false;
8213 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8214 &strict_overflow_p);
8215 if (t && strict_overflow_p)
8216 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8217 return t;
8220 /* Subroutine of fold_binary. This routine performs all of the
8221 transformations that are common to the equality/inequality
8222 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8223 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8224 fold_binary itself should call fold_binary instead. Fold a comparison with
8225 tree code CODE and type TYPE with operands OP0 and OP1. Return
8226 the folded comparison or NULL_TREE. */
8228 static tree
8229 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8231 tree arg0, arg1, tem;
8233 arg0 = op0;
8234 arg1 = op1;
8236 STRIP_SIGN_NOPS (arg0);
8237 STRIP_SIGN_NOPS (arg1);
8239 tem = fold_relational_const (code, type, arg0, arg1);
8240 if (tem != NULL_TREE)
8241 return tem;
8243 /* If one arg is a real or integer constant, put it last. */
8244 if (tree_swap_operands_p (arg0, arg1, true))
8245 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8247 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8248 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8249 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8250 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8251 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8252 && (TREE_CODE (arg1) == INTEGER_CST
8253 && !TREE_OVERFLOW (arg1)))
8255 tree const1 = TREE_OPERAND (arg0, 1);
8256 tree const2 = arg1;
8257 tree variable = TREE_OPERAND (arg0, 0);
8258 tree lhs;
8259 int lhs_add;
8260 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8262 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8263 TREE_TYPE (arg1), const2, const1);
8265 /* If the constant operation overflowed this can be
8266 simplified as a comparison against INT_MAX/INT_MIN. */
8267 if (TREE_CODE (lhs) == INTEGER_CST
8268 && TREE_OVERFLOW (lhs))
8270 int const1_sgn = tree_int_cst_sgn (const1);
8271 enum tree_code code2 = code;
8273 /* Get the sign of the constant on the lhs if the
8274 operation were VARIABLE + CONST1. */
8275 if (TREE_CODE (arg0) == MINUS_EXPR)
8276 const1_sgn = -const1_sgn;
8278 /* The sign of the constant determines if we overflowed
8279 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8280 Canonicalize to the INT_MIN overflow by swapping the comparison
8281 if necessary. */
8282 if (const1_sgn == -1)
8283 code2 = swap_tree_comparison (code);
8285 /* We can now look at the canonicalized case
8286 VARIABLE + 1 CODE2 INT_MIN
8287 and decide on the result. */
8288 if (code2 == LT_EXPR
8289 || code2 == LE_EXPR
8290 || code2 == EQ_EXPR)
8291 return omit_one_operand (type, boolean_false_node, variable);
8292 else if (code2 == NE_EXPR
8293 || code2 == GE_EXPR
8294 || code2 == GT_EXPR)
8295 return omit_one_operand (type, boolean_true_node, variable);
8298 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8299 && (TREE_CODE (lhs) != INTEGER_CST
8300 || !TREE_OVERFLOW (lhs)))
8302 fold_overflow_warning (("assuming signed overflow does not occur "
8303 "when changing X +- C1 cmp C2 to "
8304 "X cmp C1 +- C2"),
8305 WARN_STRICT_OVERFLOW_COMPARISON);
8306 return fold_build2 (code, type, variable, lhs);
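/* E.g. X + 3 < 7 becomes X < 4. If computing C2 +- C1 overflows,
   as in X + 1 < INT_MIN, the comparison folds directly to a
   constant by the INT_MAX/INT_MIN analysis above. */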
8310 /* For comparisons of pointers we can decompose them to a compile time
8311 comparison of the base objects and the offsets into the object.
8312 This requires at least one operand being an ADDR_EXPR to do more
8313 than the operand_equal_p test below. */
8314 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8315 && (TREE_CODE (arg0) == ADDR_EXPR
8316 || TREE_CODE (arg1) == ADDR_EXPR))
8318 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8319 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8320 enum machine_mode mode;
8321 int volatilep, unsignedp;
8322 bool indirect_base0 = false;
8324 /* Get base and offset for the access. Strip ADDR_EXPR for
8325 get_inner_reference, but put it back by stripping INDIRECT_REF
8326 off the base object if possible. */
8327 base0 = arg0;
8328 if (TREE_CODE (arg0) == ADDR_EXPR)
8330 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8331 &bitsize, &bitpos0, &offset0, &mode,
8332 &unsignedp, &volatilep, false);
8333 if (TREE_CODE (base0) == INDIRECT_REF)
8334 base0 = TREE_OPERAND (base0, 0);
8335 else
8336 indirect_base0 = true;
8339 base1 = arg1;
8340 if (TREE_CODE (arg1) == ADDR_EXPR)
8342 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8343 &bitsize, &bitpos1, &offset1, &mode,
8344 &unsignedp, &volatilep, false);
8345 /* We have to make sure to have an indirect/non-indirect base1
8346 just the same as we did for base0. */
8347 if (TREE_CODE (base1) == INDIRECT_REF
8348 && !indirect_base0)
8349 base1 = TREE_OPERAND (base1, 0);
8350 else if (!indirect_base0)
8351 base1 = NULL_TREE;
8353 else if (indirect_base0)
8354 base1 = NULL_TREE;
8356 /* If we have equivalent bases we might be able to simplify. */
8357 if (base0 && base1
8358 && operand_equal_p (base0, base1, 0))
8360 /* We can fold this expression to a constant if the non-constant
8361 offset parts are equal. */
8362 if (offset0 == offset1
8363 || (offset0 && offset1
8364 && operand_equal_p (offset0, offset1, 0)))
8366 switch (code)
8368 case EQ_EXPR:
8369 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8370 case NE_EXPR:
8371 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8372 case LT_EXPR:
8373 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8374 case LE_EXPR:
8375 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8376 case GE_EXPR:
8377 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8378 case GT_EXPR:
8379 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8380 default:;
8383 /* We can simplify the comparison to a comparison of the variable
8384 offset parts if the constant offset parts are equal.
8385 Be careful to use signed size type here because otherwise we
8386 mess with array offsets in the wrong way. This is possible
8387 because pointer arithmetic is restricted to remain within an
8388 object and overflow on pointer differences is undefined as of
8389 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8390 else if (bitpos0 == bitpos1)
8392 tree signed_size_type_node;
8393 signed_size_type_node = signed_type_for (size_type_node);
8395 /* By converting to signed size type we cover middle-end pointer
8396 arithmetic which operates on unsigned pointer types of size
8397 type size and ARRAY_REF offsets which are properly sign or
8398 zero extended from their type in case it is narrower than
8399 size type. */
8400 if (offset0 == NULL_TREE)
8401 offset0 = build_int_cst (signed_size_type_node, 0);
8402 else
8403 offset0 = fold_convert (signed_size_type_node, offset0);
8404 if (offset1 == NULL_TREE)
8405 offset1 = build_int_cst (signed_size_type_node, 0);
8406 else
8407 offset1 = fold_convert (signed_size_type_node, offset1);
8409 return fold_build2 (code, type, offset0, offset1);
8414 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8415 same object, then we can fold this to a comparison of the two offsets in
8416 signed size type. This is possible because pointer arithmetic is
8417 restricted to retain within an object and overflow on pointer differences
8418 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8420 We check flag_wrapv directly because pointer types are unsigned,
8421 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8422 normally what we want, to avoid certain odd overflow cases, but
8423 not here. */
8424 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8425 && !flag_wrapv
8426 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8428 tree base0, offset0, base1, offset1;
8430 if (extract_array_ref (arg0, &base0, &offset0)
8431 && extract_array_ref (arg1, &base1, &offset1)
8432 && operand_equal_p (base0, base1, 0))
8434 tree signed_size_type_node;
8435 signed_size_type_node = signed_type_for (size_type_node);
8437 /* By converting to signed size type we cover middle-end pointer
8438 arithmetic which operates on unsigned pointer types of size
8439 type size and ARRAY_REF offsets which are properly sign or
8440 zero extended from their type in case it is narrower than
8441 size type. */
8442 if (offset0 == NULL_TREE)
8443 offset0 = build_int_cst (signed_size_type_node, 0);
8444 else
8445 offset0 = fold_convert (signed_size_type_node, offset0);
8446 if (offset1 == NULL_TREE)
8447 offset1 = build_int_cst (signed_size_type_node, 0);
8448 else
8449 offset1 = fold_convert (signed_size_type_node, offset1);
8451 return fold_build2 (code, type, offset0, offset1);
8455 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8456 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8457 the resulting offset is smaller in absolute value than the
8458 original one. */
8459 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8460 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8461 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8462 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8463 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8464 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8465 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8467 tree const1 = TREE_OPERAND (arg0, 1);
8468 tree const2 = TREE_OPERAND (arg1, 1);
8469 tree variable1 = TREE_OPERAND (arg0, 0);
8470 tree variable2 = TREE_OPERAND (arg1, 0);
8471 tree cst;
8472 const char * const warnmsg = G_("assuming signed overflow does not "
8473 "occur when combining constants around "
8474 "a comparison");
8476 /* Put the constant on the side where it doesn't overflow and is
8477 of lower absolute value than before. */
8478 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8479 ? MINUS_EXPR : PLUS_EXPR,
8480 const2, const1, 0);
8481 if (!TREE_OVERFLOW (cst)
8482 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8484 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8485 return fold_build2 (code, type,
8486 variable1,
8487 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8488 variable2, cst));
8491 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8492 ? MINUS_EXPR : PLUS_EXPR,
8493 const1, const2, 0);
8494 if (!TREE_OVERFLOW (cst)
8495 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8497 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8498 return fold_build2 (code, type,
8499 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8500 variable1, cst),
8501 variable2);
8505 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8506 signed arithmetic case. That form is created by the compiler
8507 often enough for folding it to be of value. One example is in
8508 computing loop trip counts after Operator Strength Reduction. */
8509 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8510 && TREE_CODE (arg0) == MULT_EXPR
8511 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8512 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8513 && integer_zerop (arg1))
8515 tree const1 = TREE_OPERAND (arg0, 1);
8516 tree const2 = arg1; /* zero */
8517 tree variable1 = TREE_OPERAND (arg0, 0);
8518 enum tree_code cmp_code = code;
8520 gcc_assert (!integer_zerop (const1));
8522 fold_overflow_warning (("assuming signed overflow does not occur when "
8523 "eliminating multiplication in comparison "
8524 "with zero"),
8525 WARN_STRICT_OVERFLOW_COMPARISON);
8527 /* If const1 is negative we swap the sense of the comparison. */
8528 if (tree_int_cst_sgn (const1) < 0)
8529 cmp_code = swap_tree_comparison (cmp_code);
8531 return fold_build2 (cmp_code, type, variable1, const2);
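/* E.g. X * 4 > 0 becomes X > 0, and X * -2 > 0 becomes X < 0,
   since a negative multiplier flips the sense of the comparison. */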
8534 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8535 if (tem)
8536 return tem;
8538 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8540 tree targ0 = strip_float_extensions (arg0);
8541 tree targ1 = strip_float_extensions (arg1);
8542 tree newtype = TREE_TYPE (targ0);
8544 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8545 newtype = TREE_TYPE (targ1);
8547 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8548 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8549 return fold_build2 (code, type, fold_convert (newtype, targ0),
8550 fold_convert (newtype, targ1));
8552 /* (-a) CMP (-b) -> b CMP a */
8553 if (TREE_CODE (arg0) == NEGATE_EXPR
8554 && TREE_CODE (arg1) == NEGATE_EXPR)
8555 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8556 TREE_OPERAND (arg0, 0));
8558 if (TREE_CODE (arg1) == REAL_CST)
8560 REAL_VALUE_TYPE cst;
8561 cst = TREE_REAL_CST (arg1);
8563 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8564 if (TREE_CODE (arg0) == NEGATE_EXPR)
8565 return fold_build2 (swap_tree_comparison (code), type,
8566 TREE_OPERAND (arg0, 0),
8567 build_real (TREE_TYPE (arg1),
8568 REAL_VALUE_NEGATE (cst)));
8570 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8571 /* a CMP (-0) -> a CMP 0 */
8572 if (REAL_VALUE_MINUS_ZERO (cst))
8573 return fold_build2 (code, type, arg0,
8574 build_real (TREE_TYPE (arg1), dconst0));
8576 /* x != NaN is always true, other ops are always false. */
8577 if (REAL_VALUE_ISNAN (cst)
8578 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8580 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8581 return omit_one_operand (type, tem, arg0);
8584 /* Fold comparisons against infinity. */
8585 if (REAL_VALUE_ISINF (cst))
8587 tem = fold_inf_compare (code, type, arg0, arg1);
8588 if (tem != NULL_TREE)
8589 return tem;
8593 /* If this is a comparison of a real constant with a PLUS_EXPR
8594 or a MINUS_EXPR of a real constant, we can convert it into a
8595 comparison with a revised real constant as long as no overflow
8596 occurs when unsafe_math_optimizations are enabled. */
8597 if (flag_unsafe_math_optimizations
8598 && TREE_CODE (arg1) == REAL_CST
8599 && (TREE_CODE (arg0) == PLUS_EXPR
8600 || TREE_CODE (arg0) == MINUS_EXPR)
8601 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8602 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8603 ? MINUS_EXPR : PLUS_EXPR,
8604 arg1, TREE_OPERAND (arg0, 1), 0))
8605 && !TREE_OVERFLOW (tem))
8606 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8608 /* Likewise, we can simplify a comparison of a real constant with
8609 a MINUS_EXPR whose first operand is also a real constant, i.e.
8610 (c1 - x) < c2 becomes x > c1-c2. */
8611 if (flag_unsafe_math_optimizations
8612 && TREE_CODE (arg1) == REAL_CST
8613 && TREE_CODE (arg0) == MINUS_EXPR
8614 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8615 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8616 arg1, 0))
8617 && !TREE_OVERFLOW (tem))
8618 return fold_build2 (swap_tree_comparison (code), type,
8619 TREE_OPERAND (arg0, 1), tem);
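/* E.g. with -funsafe-math-optimizations, (10.0 - X) < 4.0 is
   rewritten as X > 6.0. */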
8621 /* Fold comparisons against built-in math functions. */
8622 if (TREE_CODE (arg1) == REAL_CST
8623 && flag_unsafe_math_optimizations
8624 && ! flag_errno_math)
8626 enum built_in_function fcode = builtin_mathfn_code (arg0);
8628 if (fcode != END_BUILTINS)
8630 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8631 if (tem != NULL_TREE)
8632 return tem;
8637 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8638 if (TREE_CONSTANT (arg1)
8639 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8640 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8641 /* This optimization is invalid for ordered comparisons
8642 if CONST+INCR overflows or if foo+incr might overflow.
8643 This optimization is invalid for floating point due to rounding.
8644 For pointer types we assume overflow doesn't happen. */
8645 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8646 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8647 && (code == EQ_EXPR || code == NE_EXPR))))
8649 tree varop, newconst;
8651 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8653 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8654 arg1, TREE_OPERAND (arg0, 1));
8655 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8656 TREE_OPERAND (arg0, 0),
8657 TREE_OPERAND (arg0, 1));
8659 else
8661 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8662 arg1, TREE_OPERAND (arg0, 1));
8663 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8664 TREE_OPERAND (arg0, 0),
8665 TREE_OPERAND (arg0, 1));
8669 /* If VAROP is a reference to a bitfield, we must mask
8670 the constant by the width of the field. */
8671 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8672 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8673 && host_integerp (DECL_SIZE (TREE_OPERAND
8674 (TREE_OPERAND (varop, 0), 1)), 1))
8676 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8677 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8678 tree folded_compare, shift;
8680 /* First check whether the comparison would come out
8681 always the same. If we don't do that we would
8682 change the meaning with the masking. */
8683 folded_compare = fold_build2 (code, type,
8684 TREE_OPERAND (varop, 0), arg1);
8685 if (TREE_CODE (folded_compare) == INTEGER_CST)
8686 return omit_one_operand (type, folded_compare, varop);
8688 shift = build_int_cst (NULL_TREE,
8689 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8690 shift = fold_convert (TREE_TYPE (varop), shift);
8691 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8692 newconst, shift);
8693 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8694 newconst, shift);
8697 return fold_build2 (code, type, varop, newconst);
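/* E.g. X++ == 5 becomes ++X == 6, performing the increment before
   the suitably adjusted comparison. */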
8700 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8701 && (TREE_CODE (arg0) == NOP_EXPR
8702 || TREE_CODE (arg0) == CONVERT_EXPR))
8704 /* If we are widening one operand of an integer comparison,
8705 see if the other operand is similarly being widened. Perhaps we
8706 can do the comparison in the narrower type. */
8707 tem = fold_widened_comparison (code, type, arg0, arg1);
8708 if (tem)
8709 return tem;
8711 /* Or if we are changing signedness. */
8712 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8713 if (tem)
8714 return tem;
8717 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8718 constant, we can simplify it. */
8719 if (TREE_CODE (arg1) == INTEGER_CST
8720 && (TREE_CODE (arg0) == MIN_EXPR
8721 || TREE_CODE (arg0) == MAX_EXPR)
8722 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8724 tem = optimize_minmax_comparison (code, type, op0, op1);
8725 if (tem)
8726 return tem;
8729 /* Simplify comparison of something with itself. (For IEEE
8730 floating-point, we can only do some of these simplifications.) */
8731 if (operand_equal_p (arg0, arg1, 0))
8733 switch (code)
8735 case EQ_EXPR:
8736 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8737 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8738 return constant_boolean_node (1, type);
8739 break;
8741 case GE_EXPR:
8742 case LE_EXPR:
8743 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8744 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8745 return constant_boolean_node (1, type);
8746 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8748 case NE_EXPR:
8749 /* For NE, we can only do this simplification if integer
8750 or we don't honor IEEE floating point NaNs. */
8751 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8752 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8753 break;
8754 /* ... fall through ... */
8755 case GT_EXPR:
8756 case LT_EXPR:
8757 return constant_boolean_node (0, type);
8758 default:
8759 gcc_unreachable ();
8763 /* If we are comparing an expression that just has comparisons
8764 of two integer values, arithmetic expressions of those comparisons,
8765 and constants, we can simplify it. There are only three cases
8766 to check: the two values can either be equal, the first can be
8767 greater, or the second can be greater. Fold the expression for
8768 those three values. Since each value must be 0 or 1, we have
8769 eight possibilities, each of which corresponds to the constant 0
8770 or 1 or one of the six possible comparisons.
8772 This handles common cases like (a > b) == 0 but also handles
8773 expressions like ((x > y) - (y > x)) > 0, which supposedly
8774 occur in macroized code. */
8776 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8778 tree cval1 = 0, cval2 = 0;
8779 int save_p = 0;
8781 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8782 /* Don't handle degenerate cases here; they should already
8783 have been handled anyway. */
8784 && cval1 != 0 && cval2 != 0
8785 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8786 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8787 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8788 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8789 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8790 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8791 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8793 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8794 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8796 /* We can't just pass T to eval_subst in case cval1 or cval2
8797 was the same as ARG1. */
8799 tree high_result
8800 = fold_build2 (code, type,
8801 eval_subst (arg0, cval1, maxval,
8802 cval2, minval),
8803 arg1);
8804 tree equal_result
8805 = fold_build2 (code, type,
8806 eval_subst (arg0, cval1, maxval,
8807 cval2, maxval),
8808 arg1);
8809 tree low_result
8810 = fold_build2 (code, type,
8811 eval_subst (arg0, cval1, minval,
8812 cval2, maxval),
8813 arg1);
8815 /* All three of these results should be 0 or 1. Confirm they are.
8816 Then use those values to select the proper code to use. */
8818 if (TREE_CODE (high_result) == INTEGER_CST
8819 && TREE_CODE (equal_result) == INTEGER_CST
8820 && TREE_CODE (low_result) == INTEGER_CST)
8822 /* Make a 3-bit mask with the high-order bit being the
8823 value for `>', the next for `=', and the low for `<'. */
8824 switch ((integer_onep (high_result) * 4)
8825 + (integer_onep (equal_result) * 2)
8826 + integer_onep (low_result))
8828 case 0:
8829 /* Always false. */
8830 return omit_one_operand (type, integer_zero_node, arg0);
8831 case 1:
8832 code = LT_EXPR;
8833 break;
8834 case 2:
8835 code = EQ_EXPR;
8836 break;
8837 case 3:
8838 code = LE_EXPR;
8839 break;
8840 case 4:
8841 code = GT_EXPR;
8842 break;
8843 case 5:
8844 code = NE_EXPR;
8845 break;
8846 case 6:
8847 code = GE_EXPR;
8848 break;
8849 case 7:
8850 /* Always true. */
8851 return omit_one_operand (type, integer_one_node, arg0);
8854 if (save_p)
8855 return save_expr (build2 (code, type, cval1, cval2));
8856 return fold_build2 (code, type, cval1, cval2);
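/* Worked example: for (a > b) == 0, substituting the three
   orderings of a and b gives high_result == 0, equal_result == 1
   and low_result == 1, i.e. mask value 3, so the whole expression
   folds to a <= b. */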
8861 /* Fold a comparison of the address of COMPONENT_REFs with the same
8862 type and component to a comparison of the address of the base
8863 object. In short, &x->a OP &y->a to x OP y and
8864 &x->a OP &y.a to x OP &y */
8865 if (TREE_CODE (arg0) == ADDR_EXPR
8866 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8867 && TREE_CODE (arg1) == ADDR_EXPR
8868 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8870 tree cref0 = TREE_OPERAND (arg0, 0);
8871 tree cref1 = TREE_OPERAND (arg1, 0);
8872 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8874 tree op0 = TREE_OPERAND (cref0, 0);
8875 tree op1 = TREE_OPERAND (cref1, 0);
8876 return fold_build2 (code, type,
8877 build_fold_addr_expr (op0),
8878 build_fold_addr_expr (op1));
8882 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8883 into a single range test. */
8884 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8885 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8886 && TREE_CODE (arg1) == INTEGER_CST
8887 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8888 && !integer_zerop (TREE_OPERAND (arg0, 1))
8889 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8890 && !TREE_OVERFLOW (arg1))
8892 tem = fold_div_compare (code, type, arg0, arg1);
8893 if (tem != NULL_TREE)
8894 return tem;
8897 /* Fold ~X op ~Y as Y op X. */
8898 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8899 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8901 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8902 return fold_build2 (code, type,
8903 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8904 TREE_OPERAND (arg0, 0));
8907 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8908 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8909 && TREE_CODE (arg1) == INTEGER_CST)
8911 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8912 return fold_build2 (swap_tree_comparison (code), type,
8913 TREE_OPERAND (arg0, 0),
8914 fold_build1 (BIT_NOT_EXPR, cmp_type,
8915 fold_convert (cmp_type, arg1)));
8918 return NULL_TREE;
8922 /* Subroutine of fold_binary. Optimize complex multiplications of the
8923 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8924 argument EXPR represents the expression "z" of type TYPE. */
8926 static tree
8927 fold_mult_zconjz (tree type, tree expr)
8929 tree itype = TREE_TYPE (type);
8930 tree rpart, ipart, tem;
8932 if (TREE_CODE (expr) == COMPLEX_EXPR)
8934 rpart = TREE_OPERAND (expr, 0);
8935 ipart = TREE_OPERAND (expr, 1);
8937 else if (TREE_CODE (expr) == COMPLEX_CST)
8939 rpart = TREE_REALPART (expr);
8940 ipart = TREE_IMAGPART (expr);
8942 else
8944 expr = save_expr (expr);
8945 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8946 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8949 rpart = save_expr (rpart);
8950 ipart = save_expr (ipart);
8951 tem = fold_build2 (PLUS_EXPR, itype,
8952 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8953 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8954 return fold_build2 (COMPLEX_EXPR, type, tem,
8955 fold_convert (itype, integer_zero_node));
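/* The save_exprs above ensure that EXPR and its parts are evaluated
   only once, even though RPART and IPART are each used twice in the
   final expression. */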
8959 /* Fold a binary expression of code CODE and type TYPE with operands
8960 OP0 and OP1. Return the folded expression if folding is
8961 successful. Otherwise, return NULL_TREE. */
8963 tree
8964 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8966 enum tree_code_class kind = TREE_CODE_CLASS (code);
8967 tree arg0, arg1, tem;
8968 tree t1 = NULL_TREE;
8969 bool strict_overflow_p;
8971 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8972 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8973 && TREE_CODE_LENGTH (code) == 2
8974 && op0 != NULL_TREE
8975 && op1 != NULL_TREE);
8977 arg0 = op0;
8978 arg1 = op1;
8980 /* Strip any conversions that don't change the mode. This is
8981 safe for every expression, except for a comparison expression
8982 because its signedness is derived from its operands. So, in
8983 the latter case, only strip conversions that don't change the
8984 signedness.
8986 Note that this is done as an internal manipulation within the
8987 constant folder, in order to find the simplest representation
8988 of the arguments so that their form can be studied. In any
8989 case, the appropriate type conversions should be put back in
8990 the tree that will get out of the constant folder. */
8992 if (kind == tcc_comparison)
8994 STRIP_SIGN_NOPS (arg0);
8995 STRIP_SIGN_NOPS (arg1);
8997 else
8999 STRIP_NOPS (arg0);
9000 STRIP_NOPS (arg1);
9003 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9004 constant but we can't do arithmetic on them. */
9005 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9006 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9007 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9008 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9010 if (kind == tcc_binary)
9011 tem = const_binop (code, arg0, arg1, 0);
9012 else if (kind == tcc_comparison)
9013 tem = fold_relational_const (code, type, arg0, arg1);
9014 else
9015 tem = NULL_TREE;
9017 if (tem != NULL_TREE)
9019 if (TREE_TYPE (tem) != type)
9020 tem = fold_convert (type, tem);
9021 return tem;
9025 /* If this is a commutative operation, and ARG0 is a constant, move it
9026 to ARG1 to reduce the number of tests below. */
9027 if (commutative_tree_code (code)
9028 && tree_swap_operands_p (arg0, arg1, true))
9029 return fold_build2 (code, type, op1, op0);
9031 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9033 First check for cases where an arithmetic operation is applied to a
9034 compound, conditional, or comparison operation. Push the arithmetic
9035 operation inside the compound or conditional to see if any folding
9036 can then be done. Convert comparison to conditional for this purpose.
9037 This also optimizes non-constant cases that used to be done in
9038 expand_expr.
9040 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9041 where one of the operands is a comparison and the other is a comparison, a
9042 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9043 code below would make the expression more complex. Change it to a
9044 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9045 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9047 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9048 || code == EQ_EXPR || code == NE_EXPR)
9049 && ((truth_value_p (TREE_CODE (arg0))
9050 && (truth_value_p (TREE_CODE (arg1))
9051 || (TREE_CODE (arg1) == BIT_AND_EXPR
9052 && integer_onep (TREE_OPERAND (arg1, 1)))))
9053 || (truth_value_p (TREE_CODE (arg1))
9054 && (truth_value_p (TREE_CODE (arg0))
9055 || (TREE_CODE (arg0) == BIT_AND_EXPR
9056 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9058 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9059 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9060 : TRUTH_XOR_EXPR,
9061 boolean_type_node,
9062 fold_convert (boolean_type_node, arg0),
9063 fold_convert (boolean_type_node, arg1));
9065 if (code == EQ_EXPR)
9066 tem = invert_truthvalue (tem);
9068 return fold_convert (type, tem);
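/* Editorial example (illustrative, not part of the original source):
   for integer a, b, c, d the bitwise form (a < b) | (c < d) is
   rewritten as the non-short-circuit logical OR (TRUTH_OR_EXPR) of
   the two comparisons, and (a < b) == (c < d) becomes the inversion
   of their TRUTH_XOR_EXPR; each operand is known to be 0 or 1, so
   the forms are equivalent.  */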
9071 if (TREE_CODE_CLASS (code) == tcc_binary
9072 || TREE_CODE_CLASS (code) == tcc_comparison)
9074 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9075 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9076 fold_build2 (code, type,
9077 TREE_OPERAND (arg0, 1), op1));
9078 if (TREE_CODE (arg1) == COMPOUND_EXPR
9079 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9080 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9081 fold_build2 (code, type,
9082 op0, TREE_OPERAND (arg1, 1)));
9084 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9086 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9087 arg0, arg1,
9088 /*cond_first_p=*/1);
9089 if (tem != NULL_TREE)
9090 return tem;
9093 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9095 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9096 arg1, arg0,
9097 /*cond_first_p=*/0);
9098 if (tem != NULL_TREE)
9099 return tem;
9103 switch (code)
9105 case PLUS_EXPR:
9106 /* A + (-B) -> A - B */
9107 if (TREE_CODE (arg1) == NEGATE_EXPR)
9108 return fold_build2 (MINUS_EXPR, type,
9109 fold_convert (type, arg0),
9110 fold_convert (type, TREE_OPERAND (arg1, 0)));
9111 /* (-A) + B -> B - A */
9112 if (TREE_CODE (arg0) == NEGATE_EXPR
9113 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9114 return fold_build2 (MINUS_EXPR, type,
9115 fold_convert (type, arg1),
9116 fold_convert (type, TREE_OPERAND (arg0, 0)));
9117 /* Convert ~A + 1 to -A. */
9118 if (INTEGRAL_TYPE_P (type)
9119 && TREE_CODE (arg0) == BIT_NOT_EXPR
9120 && integer_onep (arg1))
9121 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
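/* Editorial example (illustrative, not part of the original source):
   on two's complement targets -a is ~a + 1, so an expression such as
   ~a + 1 with integral a folds directly to the single NEGATE_EXPR
   -a.  */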
9123 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9124 same or one. */
9125 if ((TREE_CODE (arg0) == MULT_EXPR
9126 || TREE_CODE (arg1) == MULT_EXPR)
9127 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9129 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9130 if (tem)
9131 return tem;
9134 if (! FLOAT_TYPE_P (type))
9136 if (integer_zerop (arg1))
9137 return non_lvalue (fold_convert (type, arg0));
9139 /* ~X + X is -1. */
9140 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9141 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9142 && !TYPE_OVERFLOW_TRAPS (type))
9144 t1 = build_int_cst_type (type, -1);
9145 return omit_one_operand (type, t1, arg1);
9148 /* X + ~X is -1. */
9149 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9150 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9151 && !TYPE_OVERFLOW_TRAPS (type))
9153 t1 = build_int_cst_type (type, -1);
9154 return omit_one_operand (type, t1, arg0);
9157 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9158 with a constant, and the two constants have no bits in common,
9159 we should treat this as a BIT_IOR_EXPR since this may produce more
9160 simplifications. */
9161 if (TREE_CODE (arg0) == BIT_AND_EXPR
9162 && TREE_CODE (arg1) == BIT_AND_EXPR
9163 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9164 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9165 && integer_zerop (const_binop (BIT_AND_EXPR,
9166 TREE_OPERAND (arg0, 1),
9167 TREE_OPERAND (arg1, 1), 0)))
9169 code = BIT_IOR_EXPR;
9170 goto bit_ior;
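/* Editorial example (illustrative, not part of the original source):
   in (x & 0xF0) + (x & 0x0F) no carry can propagate between the two
   disjoint masks, so the sum is rewritten as (x & 0xF0) | (x & 0x0F),
   which the BIT_IOR_EXPR code can simplify further (here to
   x & 0xFF).  */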
9173 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9174 (plus (plus (mult) (mult)) (foo)) so that we can
9175 take advantage of the factoring cases below. */
9176 if (((TREE_CODE (arg0) == PLUS_EXPR
9177 || TREE_CODE (arg0) == MINUS_EXPR)
9178 && TREE_CODE (arg1) == MULT_EXPR)
9179 || ((TREE_CODE (arg1) == PLUS_EXPR
9180 || TREE_CODE (arg1) == MINUS_EXPR)
9181 && TREE_CODE (arg0) == MULT_EXPR))
9183 tree parg0, parg1, parg, marg;
9184 enum tree_code pcode;
9186 if (TREE_CODE (arg1) == MULT_EXPR)
9187 parg = arg0, marg = arg1;
9188 else
9189 parg = arg1, marg = arg0;
9190 pcode = TREE_CODE (parg);
9191 parg0 = TREE_OPERAND (parg, 0);
9192 parg1 = TREE_OPERAND (parg, 1);
9193 STRIP_NOPS (parg0);
9194 STRIP_NOPS (parg1);
9196 if (TREE_CODE (parg0) == MULT_EXPR
9197 && TREE_CODE (parg1) != MULT_EXPR)
9198 return fold_build2 (pcode, type,
9199 fold_build2 (PLUS_EXPR, type,
9200 fold_convert (type, parg0),
9201 fold_convert (type, marg)),
9202 fold_convert (type, parg1));
9203 if (TREE_CODE (parg0) != MULT_EXPR
9204 && TREE_CODE (parg1) == MULT_EXPR)
9205 return fold_build2 (PLUS_EXPR, type,
9206 fold_convert (type, parg0),
9207 fold_build2 (pcode, type,
9208 fold_convert (type, marg),
9209 fold_convert (type,
9210 parg1)));
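/* Editorial example (illustrative, not part of the original source):
   ((a * b) + c) + (d * e) is reassociated as ((a * b) + (d * e)) + c,
   putting both MULT_EXPRs next to each other so that
   fold_plusminus_mult_expr above can factor them, e.g. into
   (a + d) * b + c when b and e are the same tree.  */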
9213 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
9214 of the array. The loop optimizer sometimes produces this type of
9215 expression. */
9216 if (TREE_CODE (arg0) == ADDR_EXPR)
9218 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9219 if (tem)
9220 return fold_convert (type, tem);
9222 else if (TREE_CODE (arg1) == ADDR_EXPR)
9224 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9225 if (tem)
9226 return fold_convert (type, tem);
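/* Editorial example (illustrative, not part of the original source,
   and assuming sizeof (int) == 4): for int a[], the address
   computation &a[i] + 4 * j produced by the loop optimizer matches
   the array step and is rewritten as the single address
   &a[i + j].  */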
9229 else
9231 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9232 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9233 return non_lvalue (fold_convert (type, arg0));
9235 /* Likewise if the operands are reversed. */
9236 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9237 return non_lvalue (fold_convert (type, arg1));
9239 /* Convert X + -C into X - C. */
9240 if (TREE_CODE (arg1) == REAL_CST
9241 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9243 tem = fold_negate_const (arg1, type);
9244 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9245 return fold_build2 (MINUS_EXPR, type,
9246 fold_convert (type, arg0),
9247 fold_convert (type, tem));
9250 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9251 to __complex__ ( x, y ). This is not the same for SNaNs or
9252 if signed zeros are involved. */
9253 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9254 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9255 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9257 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9258 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9259 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9260 bool arg0rz = false, arg0iz = false;
9261 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9262 || (arg0i && (arg0iz = real_zerop (arg0i))))
9264 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9265 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9266 if (arg0rz && arg1i && real_zerop (arg1i))
9268 tree rp = arg1r ? arg1r
9269 : build1 (REALPART_EXPR, rtype, arg1);
9270 tree ip = arg0i ? arg0i
9271 : build1 (IMAGPART_EXPR, rtype, arg0);
9272 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9274 else if (arg0iz && arg1r && real_zerop (arg1r))
9276 tree rp = arg0r ? arg0r
9277 : build1 (REALPART_EXPR, rtype, arg0);
9278 tree ip = arg1i ? arg1i
9279 : build1 (IMAGPART_EXPR, rtype, arg1);
9280 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
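/* Editorial example (illustrative, not part of the original source):
   when arg0 is __complex__ (x, 0.0) and arg1 is __complex__ (0.0, y),
   the addition folds to __complex__ (x, y) with no floating-point
   adds.  An SNaN or a negative zero in the dropped parts would make
   the shortcut observable, hence the two guards above.  */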
9285 if (flag_unsafe_math_optimizations
9286 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9287 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9288 && (tem = distribute_real_division (code, type, arg0, arg1)))
9289 return tem;
9291 /* Convert x+x into x*2.0. */
9292 if (operand_equal_p (arg0, arg1, 0)
9293 && SCALAR_FLOAT_TYPE_P (type))
9294 return fold_build2 (MULT_EXPR, type, arg0,
9295 build_real (type, dconst2));
9297 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9298 if (flag_unsafe_math_optimizations
9299 && TREE_CODE (arg1) == PLUS_EXPR
9300 && TREE_CODE (arg0) != MULT_EXPR)
9302 tree tree10 = TREE_OPERAND (arg1, 0);
9303 tree tree11 = TREE_OPERAND (arg1, 1);
9304 if (TREE_CODE (tree11) == MULT_EXPR
9305 && TREE_CODE (tree10) == MULT_EXPR)
9307 tree tree0;
9308 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9309 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9312 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
9313 if (flag_unsafe_math_optimizations
9314 && TREE_CODE (arg0) == PLUS_EXPR
9315 && TREE_CODE (arg1) != MULT_EXPR)
9317 tree tree00 = TREE_OPERAND (arg0, 0);
9318 tree tree01 = TREE_OPERAND (arg0, 1);
9319 if (TREE_CODE (tree01) == MULT_EXPR
9320 && TREE_CODE (tree00) == MULT_EXPR)
9322 tree tree0;
9323 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9324 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9329 bit_rotate:
9330 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9331 is a rotate of A by C1 bits. */
9332 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9333 is a rotate of A by B bits. */
9335 enum tree_code code0, code1;
9336 code0 = TREE_CODE (arg0);
9337 code1 = TREE_CODE (arg1);
9338 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9339 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9340 && operand_equal_p (TREE_OPERAND (arg0, 0),
9341 TREE_OPERAND (arg1, 0), 0)
9342 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9344 tree tree01, tree11;
9345 enum tree_code code01, code11;
9347 tree01 = TREE_OPERAND (arg0, 1);
9348 tree11 = TREE_OPERAND (arg1, 1);
9349 STRIP_NOPS (tree01);
9350 STRIP_NOPS (tree11);
9351 code01 = TREE_CODE (tree01);
9352 code11 = TREE_CODE (tree11);
9353 if (code01 == INTEGER_CST
9354 && code11 == INTEGER_CST
9355 && TREE_INT_CST_HIGH (tree01) == 0
9356 && TREE_INT_CST_HIGH (tree11) == 0
9357 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9358 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9359 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9360 code0 == LSHIFT_EXPR ? tree01 : tree11);
9361 else if (code11 == MINUS_EXPR)
9363 tree tree110, tree111;
9364 tree110 = TREE_OPERAND (tree11, 0);
9365 tree111 = TREE_OPERAND (tree11, 1);
9366 STRIP_NOPS (tree110);
9367 STRIP_NOPS (tree111);
9368 if (TREE_CODE (tree110) == INTEGER_CST
9369 && 0 == compare_tree_int (tree110,
9370 TYPE_PRECISION
9371 (TREE_TYPE (TREE_OPERAND
9372 (arg0, 0))))
9373 && operand_equal_p (tree01, tree111, 0))
9374 return build2 ((code0 == LSHIFT_EXPR
9375 ? LROTATE_EXPR
9376 : RROTATE_EXPR),
9377 type, TREE_OPERAND (arg0, 0), tree01);
9379 else if (code01 == MINUS_EXPR)
9381 tree tree010, tree011;
9382 tree010 = TREE_OPERAND (tree01, 0);
9383 tree011 = TREE_OPERAND (tree01, 1);
9384 STRIP_NOPS (tree010);
9385 STRIP_NOPS (tree011);
9386 if (TREE_CODE (tree010) == INTEGER_CST
9387 && 0 == compare_tree_int (tree010,
9388 TYPE_PRECISION
9389 (TREE_TYPE (TREE_OPERAND
9390 (arg0, 0))))
9391 && operand_equal_p (tree11, tree011, 0))
9392 return build2 ((code0 != LSHIFT_EXPR
9393 ? LROTATE_EXPR
9394 : RROTATE_EXPR),
9395 type, TREE_OPERAND (arg0, 0), tree11);
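/* Editorial example (illustrative, not part of the original source,
   assuming a 32-bit unsigned int x): both

	(x << 3) + (x >> 29)   and   (x << n) + (x >> (32 - n))

   match the patterns above and become a single LROTATE_EXPR, which
   most targets expand to one rotate instruction; the BIT_IOR_EXPR
   and BIT_XOR_EXPR cases reach this label too.  */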
9400 associate:
9401 /* In most languages, we can't associate operations on floats through
9402 parentheses. Rather than remember where the parentheses were, we
9403 don't associate floats at all, unless the user has specified
9404 -funsafe-math-optimizations. */
9406 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9408 tree var0, con0, lit0, minus_lit0;
9409 tree var1, con1, lit1, minus_lit1;
9410 bool ok = true;
9412 /* Split both trees into variables, constants, and literals. Then
9413 associate each group together, the constants with literals,
9414 then the result with variables. This increases the chances of
9415 literals being recombined later and of generating relocatable
9416 expressions for the sum of a constant and literal. */
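/* Editorial example (illustrative, not part of the original source):
   for (x + 1) + (y + 2) the split yields variables x, y and literals
   1, 2; associating each group gives (x + y) + 3, recombining the
   literals into one constant.  */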
9417 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9418 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9419 code == MINUS_EXPR);
9421 /* With undefined overflow we can only associate constants
9422 with one variable. */
9423 if ((POINTER_TYPE_P (type)
9424 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9425 && var0 && var1)
9427 tree tmp0 = var0;
9428 tree tmp1 = var1;
9430 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9431 tmp0 = TREE_OPERAND (tmp0, 0);
9432 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9433 tmp1 = TREE_OPERAND (tmp1, 0);
9434 /* The only case we can still associate with two variables
9435 is if they are the same, modulo negation. */
9436 if (!operand_equal_p (tmp0, tmp1, 0))
9437 ok = false;
9440 /* Only do something if we found more than two objects. Otherwise,
9441 nothing has changed and we risk infinite recursion. */
9442 if (ok
9443 && (2 < ((var0 != 0) + (var1 != 0)
9444 + (con0 != 0) + (con1 != 0)
9445 + (lit0 != 0) + (lit1 != 0)
9446 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9448 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9449 if (code == MINUS_EXPR)
9450 code = PLUS_EXPR;
9452 var0 = associate_trees (var0, var1, code, type);
9453 con0 = associate_trees (con0, con1, code, type);
9454 lit0 = associate_trees (lit0, lit1, code, type);
9455 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9457 /* Preserve the MINUS_EXPR if the negative part of the literal is
9458 greater than the positive part. Otherwise, the multiplicative
9459 folding code (i.e. extract_muldiv) may be fooled when
9460 unsigned constants are subtracted, as in the following
9461 example: ((X*2 + 4) - 8U)/2. */
9462 if (minus_lit0 && lit0)
9464 if (TREE_CODE (lit0) == INTEGER_CST
9465 && TREE_CODE (minus_lit0) == INTEGER_CST
9466 && tree_int_cst_lt (lit0, minus_lit0))
9468 minus_lit0 = associate_trees (minus_lit0, lit0,
9469 MINUS_EXPR, type);
9470 lit0 = 0;
9472 else
9474 lit0 = associate_trees (lit0, minus_lit0,
9475 MINUS_EXPR, type);
9476 minus_lit0 = 0;
9479 if (minus_lit0)
9481 if (con0 == 0)
9482 return fold_convert (type,
9483 associate_trees (var0, minus_lit0,
9484 MINUS_EXPR, type));
9485 else
9487 con0 = associate_trees (con0, minus_lit0,
9488 MINUS_EXPR, type);
9489 return fold_convert (type,
9490 associate_trees (var0, con0,
9491 PLUS_EXPR, type));
9495 con0 = associate_trees (con0, lit0, code, type);
9496 return fold_convert (type, associate_trees (var0, con0,
9497 code, type));
9501 return NULL_TREE;
9503 case MINUS_EXPR:
9504 /* A - (-B) -> A + B */
9505 if (TREE_CODE (arg1) == NEGATE_EXPR)
9506 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9507 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9508 if (TREE_CODE (arg0) == NEGATE_EXPR
9509 && (FLOAT_TYPE_P (type)
9510 || INTEGRAL_TYPE_P (type))
9511 && negate_expr_p (arg1)
9512 && reorder_operands_p (arg0, arg1))
9513 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9514 TREE_OPERAND (arg0, 0));
9515 /* Convert -A - 1 to ~A. */
9516 if (INTEGRAL_TYPE_P (type)
9517 && TREE_CODE (arg0) == NEGATE_EXPR
9518 && integer_onep (arg1)
9519 && !TYPE_OVERFLOW_TRAPS (type))
9520 return fold_build1 (BIT_NOT_EXPR, type,
9521 fold_convert (type, TREE_OPERAND (arg0, 0)));
9523 /* Convert -1 - A to ~A. */
9524 if (INTEGRAL_TYPE_P (type)
9525 && integer_all_onesp (arg0))
9526 return fold_build1 (BIT_NOT_EXPR, type, op1);
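/* Editorial example (illustrative, not part of the original source):
   in two's complement ~a == -a - 1, so both -a - 1 and -1 - a fold
   to the single BIT_NOT_EXPR ~a.  */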
9528 if (! FLOAT_TYPE_P (type))
9530 if (integer_zerop (arg0))
9531 return negate_expr (fold_convert (type, arg1));
9532 if (integer_zerop (arg1))
9533 return non_lvalue (fold_convert (type, arg0));
9535 /* Fold A - (A & B) into ~B & A. */
9536 if (!TREE_SIDE_EFFECTS (arg0)
9537 && TREE_CODE (arg1) == BIT_AND_EXPR)
9539 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9540 return fold_build2 (BIT_AND_EXPR, type,
9541 fold_build1 (BIT_NOT_EXPR, type,
9542 TREE_OPERAND (arg1, 0)),
9543 arg0);
9544 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9545 return fold_build2 (BIT_AND_EXPR, type,
9546 fold_build1 (BIT_NOT_EXPR, type,
9547 TREE_OPERAND (arg1, 1)),
9548 arg0);
9551 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9552 any power of 2 minus 1. */
9553 if (TREE_CODE (arg0) == BIT_AND_EXPR
9554 && TREE_CODE (arg1) == BIT_AND_EXPR
9555 && operand_equal_p (TREE_OPERAND (arg0, 0),
9556 TREE_OPERAND (arg1, 0), 0))
9558 tree mask0 = TREE_OPERAND (arg0, 1);
9559 tree mask1 = TREE_OPERAND (arg1, 1);
9560 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9562 if (operand_equal_p (tem, mask1, 0))
9564 tem = fold_build2 (BIT_XOR_EXPR, type,
9565 TREE_OPERAND (arg0, 0), mask1);
9566 return fold_build2 (MINUS_EXPR, type, tem, mask1);
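/* Editorial example (illustrative, not part of the original source):
   with B == 3, a power of 2 minus 1, (a & ~3) - (a & 3) folds to
   (a ^ 3) - 3; for a == 6 both sides evaluate to 2 (4 - 2 and 5 - 3
   respectively).  */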
9571 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9572 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9573 return non_lvalue (fold_convert (type, arg0));
9575 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9576 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9577 (-ARG1 + ARG0) reduces to -ARG1. */
9578 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9579 return negate_expr (fold_convert (type, arg1));
9581 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9582 __complex__ ( x, -y ). This is not the same for SNaNs or if
9583 signed zeros are involved. */
9584 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9585 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9586 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9588 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9589 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9590 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9591 bool arg0rz = false, arg0iz = false;
9592 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9593 || (arg0i && (arg0iz = real_zerop (arg0i))))
9595 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9596 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9597 if (arg0rz && arg1i && real_zerop (arg1i))
9599 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9600 arg1r ? arg1r
9601 : build1 (REALPART_EXPR, rtype, arg1));
9602 tree ip = arg0i ? arg0i
9603 : build1 (IMAGPART_EXPR, rtype, arg0);
9604 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9606 else if (arg0iz && arg1r && real_zerop (arg1r))
9608 tree rp = arg0r ? arg0r
9609 : build1 (REALPART_EXPR, rtype, arg0);
9610 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9611 arg1i ? arg1i
9612 : build1 (IMAGPART_EXPR, rtype, arg1));
9613 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9618 /* Fold &x - &x. This can happen from &x.foo - &x.
9619 This is unsafe for certain floats even in non-IEEE formats.
9620 In IEEE, it is unsafe because it does the wrong thing for NaNs.
9621 Also note that operand_equal_p is always false if an operand
9622 is volatile. */
9624 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9625 && operand_equal_p (arg0, arg1, 0))
9626 return fold_convert (type, integer_zero_node);
9628 /* A - B -> A + (-B) if B is easily negatable. */
9629 if (negate_expr_p (arg1)
9630 && ((FLOAT_TYPE_P (type)
9631 /* Avoid this transformation if B is a positive REAL_CST. */
9632 && (TREE_CODE (arg1) != REAL_CST
9633 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9634 || INTEGRAL_TYPE_P (type)))
9635 return fold_build2 (PLUS_EXPR, type,
9636 fold_convert (type, arg0),
9637 fold_convert (type, negate_expr (arg1)));
9639 /* Try folding difference of addresses. */
9641 HOST_WIDE_INT diff;
9643 if ((TREE_CODE (arg0) == ADDR_EXPR
9644 || TREE_CODE (arg1) == ADDR_EXPR)
9645 && ptr_difference_const (arg0, arg1, &diff))
9646 return build_int_cst_type (type, diff);
9649 /* Fold &a[i] - &a[j] to i-j. */
9650 if (TREE_CODE (arg0) == ADDR_EXPR
9651 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9652 && TREE_CODE (arg1) == ADDR_EXPR
9653 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9655 tree aref0 = TREE_OPERAND (arg0, 0);
9656 tree aref1 = TREE_OPERAND (arg1, 0);
9657 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9658 TREE_OPERAND (aref1, 0), 0))
9660 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9661 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9662 tree esz = array_ref_element_size (aref0);
9663 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9664 return fold_build2 (MULT_EXPR, type, diff,
9665 fold_convert (type, esz));
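/* Editorial example (illustrative, not part of the original source):
   for int a[], the byte difference &a[i] - &a[j] folds here to
   (i - j) * sizeof (int); the front end's later division by the
   element size then yields the C-level pointer difference i - j.  */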
9670 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9671 of the array. The loop optimizer sometimes produces this type of
9672 expression. */
9673 if (TREE_CODE (arg0) == ADDR_EXPR)
9675 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9676 if (tem)
9677 return fold_convert (type, tem);
9680 if (flag_unsafe_math_optimizations
9681 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9682 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9683 && (tem = distribute_real_division (code, type, arg0, arg1)))
9684 return tem;
9686 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9687 same or one. */
9688 if ((TREE_CODE (arg0) == MULT_EXPR
9689 || TREE_CODE (arg1) == MULT_EXPR)
9690 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9692 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9693 if (tem)
9694 return tem;
9697 goto associate;
9699 case MULT_EXPR:
9700 /* (-A) * (-B) -> A * B */
9701 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9702 return fold_build2 (MULT_EXPR, type,
9703 fold_convert (type, TREE_OPERAND (arg0, 0)),
9704 fold_convert (type, negate_expr (arg1)));
9705 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9706 return fold_build2 (MULT_EXPR, type,
9707 fold_convert (type, negate_expr (arg0)),
9708 fold_convert (type, TREE_OPERAND (arg1, 0)));
9710 if (! FLOAT_TYPE_P (type))
9712 if (integer_zerop (arg1))
9713 return omit_one_operand (type, arg1, arg0);
9714 if (integer_onep (arg1))
9715 return non_lvalue (fold_convert (type, arg0));
9716 /* Transform x * -1 into -x. */
9717 if (integer_all_onesp (arg1))
9718 return fold_convert (type, negate_expr (arg0));
9719 /* Transform x * -C into -x * C if x is easily negatable. */
9720 if (TREE_CODE (arg1) == INTEGER_CST
9721 && tree_int_cst_sgn (arg1) == -1
9722 && negate_expr_p (arg0)
9723 && (tem = negate_expr (arg1)) != arg1
9724 && !TREE_OVERFLOW (tem))
9725 return fold_build2 (MULT_EXPR, type,
9726 negate_expr (arg0), tem);
9728 /* (a * (1 << b)) is (a << b) */
9729 if (TREE_CODE (arg1) == LSHIFT_EXPR
9730 && integer_onep (TREE_OPERAND (arg1, 0)))
9731 return fold_build2 (LSHIFT_EXPR, type, arg0,
9732 TREE_OPERAND (arg1, 1));
9733 if (TREE_CODE (arg0) == LSHIFT_EXPR
9734 && integer_onep (TREE_OPERAND (arg0, 0)))
9735 return fold_build2 (LSHIFT_EXPR, type, arg1,
9736 TREE_OPERAND (arg0, 1));
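/* Editorial example (illustrative, not part of the original source):
   n * (1 << b) folds to n << b, replacing a runtime multiplication
   by a shift even though the shift amount b is not a constant.  */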
9738 strict_overflow_p = false;
9739 if (TREE_CODE (arg1) == INTEGER_CST
9740 && 0 != (tem = extract_muldiv (op0,
9741 fold_convert (type, arg1),
9742 code, NULL_TREE,
9743 &strict_overflow_p)))
9745 if (strict_overflow_p)
9746 fold_overflow_warning (("assuming signed overflow does not "
9747 "occur when simplifying "
9748 "multiplication"),
9749 WARN_STRICT_OVERFLOW_MISC);
9750 return fold_convert (type, tem);
9753 /* Optimize z * conj(z) for integer complex numbers. */
9754 if (TREE_CODE (arg0) == CONJ_EXPR
9755 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9756 return fold_mult_zconjz (type, arg1);
9757 if (TREE_CODE (arg1) == CONJ_EXPR
9758 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9759 return fold_mult_zconjz (type, arg0);
9761 else
9763 /* Maybe fold x * 0 to 0. The expressions aren't the same
9764 when x is NaN, since x * 0 is also NaN. Nor are they the
9765 same in modes with signed zeros, since multiplying a
9766 negative value by 0 gives -0, not +0. */
9767 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9768 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9769 && real_zerop (arg1))
9770 return omit_one_operand (type, arg1, arg0);
9771 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9772 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9773 && real_onep (arg1))
9774 return non_lvalue (fold_convert (type, arg0));
9776 /* Transform x * -1.0 into -x. */
9777 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9778 && real_minus_onep (arg1))
9779 return fold_convert (type, negate_expr (arg0));
9781 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9782 if (flag_unsafe_math_optimizations
9783 && TREE_CODE (arg0) == RDIV_EXPR
9784 && TREE_CODE (arg1) == REAL_CST
9785 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9787 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9788 arg1, 0);
9789 if (tem)
9790 return fold_build2 (RDIV_EXPR, type, tem,
9791 TREE_OPERAND (arg0, 1));
9794 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9795 if (operand_equal_p (arg0, arg1, 0))
9797 tree tem = fold_strip_sign_ops (arg0);
9798 if (tem != NULL_TREE)
9800 tem = fold_convert (type, tem);
9801 return fold_build2 (MULT_EXPR, type, tem, tem);
9805 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9806 This is not the same for NaNs or if signed zeros are
9807 involved. */
9808 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9809 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9810 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9811 && TREE_CODE (arg1) == COMPLEX_CST
9812 && real_zerop (TREE_REALPART (arg1)))
9814 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9815 if (real_onep (TREE_IMAGPART (arg1)))
9816 return fold_build2 (COMPLEX_EXPR, type,
9817 negate_expr (fold_build1 (IMAGPART_EXPR,
9818 rtype, arg0)),
9819 fold_build1 (REALPART_EXPR, rtype, arg0));
9820 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9821 return fold_build2 (COMPLEX_EXPR, type,
9822 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9823 negate_expr (fold_build1 (REALPART_EXPR,
9824 rtype, arg0)));
9827 /* Optimize z * conj(z) for floating point complex numbers.
9828 Guarded by flag_unsafe_math_optimizations as non-finite
9829 imaginary components don't produce scalar results. */
9830 if (flag_unsafe_math_optimizations
9831 && TREE_CODE (arg0) == CONJ_EXPR
9832 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9833 return fold_mult_zconjz (type, arg1);
9834 if (flag_unsafe_math_optimizations
9835 && TREE_CODE (arg1) == CONJ_EXPR
9836 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9837 return fold_mult_zconjz (type, arg0);
9839 if (flag_unsafe_math_optimizations)
9841 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9842 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9844 /* Optimizations of root(...)*root(...). */
9845 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9847 tree rootfn, arg;
9848 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9849 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9851 /* Optimize sqrt(x)*sqrt(x) as x. */
9852 if (BUILTIN_SQRT_P (fcode0)
9853 && operand_equal_p (arg00, arg10, 0)
9854 && ! HONOR_SNANS (TYPE_MODE (type)))
9855 return arg00;
9857 /* Optimize root(x)*root(y) as root(x*y). */
9858 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9859 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9860 return build_call_expr (rootfn, 1, arg);
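/* Editorial example (illustrative, not part of the original source):
   sqrt (x) * sqrt (y) folds to sqrt (x * y) and sqrt (x) * sqrt (x)
   to plain x; both rewrites depend on -funsafe-math-optimizations
   since they mis-handle e.g. negative or NaN inputs.  */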
9863 /* Optimize expN(x)*expN(y) as expN(x+y). */
9864 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9866 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9867 tree arg = fold_build2 (PLUS_EXPR, type,
9868 CALL_EXPR_ARG (arg0, 0),
9869 CALL_EXPR_ARG (arg1, 0));
9870 return build_call_expr (expfn, 1, arg);
9873 /* Optimizations of pow(...)*pow(...). */
9874 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9875 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9876 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9878 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9879 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9880 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9881 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9883 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9884 if (operand_equal_p (arg01, arg11, 0))
9886 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9887 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9888 return build_call_expr (powfn, 2, arg, arg01);
9891 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9892 if (operand_equal_p (arg00, arg10, 0))
9894 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9895 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9896 return build_call_expr (powfn, 2, arg00, arg);
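/* Editorial example (illustrative, not part of the original source):
   pow (x, y) * pow (z, y) folds to pow (x * z, y) and
   pow (x, y) * pow (x, z) to pow (x, y + z); rounding differs, so
   both rely on the enclosing -funsafe-math-optimizations guard.  */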
9900 /* Optimize tan(x)*cos(x) as sin(x). */
9901 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9902 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9903 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9904 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9905 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9906 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9907 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9908 CALL_EXPR_ARG (arg1, 0), 0))
9910 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9912 if (sinfn != NULL_TREE)
9913 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9916 /* Optimize x*pow(x,c) as pow(x,c+1). */
9917 if (fcode1 == BUILT_IN_POW
9918 || fcode1 == BUILT_IN_POWF
9919 || fcode1 == BUILT_IN_POWL)
9921 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9922 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9923 if (TREE_CODE (arg11) == REAL_CST
9924 && !TREE_OVERFLOW (arg11)
9925 && operand_equal_p (arg0, arg10, 0))
9927 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9928 REAL_VALUE_TYPE c;
9929 tree arg;
9931 c = TREE_REAL_CST (arg11);
9932 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9933 arg = build_real (type, c);
9934 return build_call_expr (powfn, 2, arg0, arg);
9938 /* Optimize pow(x,c)*x as pow(x,c+1). */
9939 if (fcode0 == BUILT_IN_POW
9940 || fcode0 == BUILT_IN_POWF
9941 || fcode0 == BUILT_IN_POWL)
9943 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9944 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9945 if (TREE_CODE (arg01) == REAL_CST
9946 && !TREE_OVERFLOW (arg01)
9947 && operand_equal_p (arg1, arg00, 0))
9949 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9950 REAL_VALUE_TYPE c;
9951 tree arg;
9953 c = TREE_REAL_CST (arg01);
9954 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9955 arg = build_real (type, c);
9956 return build_call_expr (powfn, 2, arg1, arg);
9960 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9961 if (! optimize_size
9962 && operand_equal_p (arg0, arg1, 0))
9964 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9966 if (powfn)
9968 tree arg = build_real (type, dconst2);
9969 return build_call_expr (powfn, 2, arg0, arg);
9974 goto associate;
9976 case BIT_IOR_EXPR:
9977 bit_ior:
9978 if (integer_all_onesp (arg1))
9979 return omit_one_operand (type, arg1, arg0);
9980 if (integer_zerop (arg1))
9981 return non_lvalue (fold_convert (type, arg0));
9982 if (operand_equal_p (arg0, arg1, 0))
9983 return non_lvalue (fold_convert (type, arg0));
9985 /* ~X | X is -1. */
9986 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9987 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9989 t1 = build_int_cst_type (type, -1);
9990 return omit_one_operand (type, t1, arg1);
9993 /* X | ~X is -1. */
9994 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9995 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9997 t1 = build_int_cst_type (type, -1);
9998 return omit_one_operand (type, t1, arg0);
10001 /* Canonicalize (X & C1) | C2. */
10002 if (TREE_CODE (arg0) == BIT_AND_EXPR
10003 && TREE_CODE (arg1) == INTEGER_CST
10004 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10006 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10007 int width = TYPE_PRECISION (type);
10008 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10009 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10010 hi2 = TREE_INT_CST_HIGH (arg1);
10011 lo2 = TREE_INT_CST_LOW (arg1);
10013 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10014 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10015 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10017 if (width > HOST_BITS_PER_WIDE_INT)
10019 mhi = (unsigned HOST_WIDE_INT) -1
10020 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10021 mlo = -1;
10023 else
10025 mhi = 0;
10026 mlo = (unsigned HOST_WIDE_INT) -1
10027 >> (HOST_BITS_PER_WIDE_INT - width);
10030 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10031 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10032 return fold_build2 (BIT_IOR_EXPR, type,
10033 TREE_OPERAND (arg0, 0), arg1);
10035 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10036 hi1 &= mhi;
10037 lo1 &= mlo;
10038 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10039 return fold_build2 (BIT_IOR_EXPR, type,
10040 fold_build2 (BIT_AND_EXPR, type,
10041 TREE_OPERAND (arg0, 0),
10042 build_int_cst_wide (type,
10043 lo1 & ~lo2,
10044 hi1 & ~hi2)),
10045 arg1);
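/* Editorial example (illustrative, not part of the original source):
   in (x & 0x0F) | 0x3C the bits 0x0C of C1 are already provided by
   C2, so C1 shrinks to C1 & ~C2 and the expression is canonicalized
   to (x & 0x03) | 0x3C.  */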
10048 /* (X & Y) | Y is (X, Y). */
10049 if (TREE_CODE (arg0) == BIT_AND_EXPR
10050 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10051 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10052 /* (X & Y) | X is (Y, X). */
10053 if (TREE_CODE (arg0) == BIT_AND_EXPR
10054 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10055 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10056 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10057 /* X | (X & Y) is (Y, X). */
10058 if (TREE_CODE (arg1) == BIT_AND_EXPR
10059 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10060 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10061 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10062 /* X | (Y & X) is (Y, X). */
10063 if (TREE_CODE (arg1) == BIT_AND_EXPR
10064 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10065 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10066 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10068 t1 = distribute_bit_expr (code, type, arg0, arg1);
10069 if (t1 != NULL_TREE)
10070 return t1;
10072 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10074 This results in more efficient code for machines without a NAND
10075 instruction. Combine will canonicalize to the first form
10076 which will allow use of NAND instructions provided by the
10077 backend if they exist. */
10078 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10079 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10081 return fold_build1 (BIT_NOT_EXPR, type,
10082 build2 (BIT_AND_EXPR, type,
10083 TREE_OPERAND (arg0, 0),
10084 TREE_OPERAND (arg1, 0)));
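/* Editorial example (illustrative, not part of the original source):
   by De Morgan's law ~a | ~b == ~(a & b); the single BIT_NOT_EXPR
   over a BIT_AND_EXPR maps directly onto a NAND instruction on
   targets that provide one.  */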
10087 /* See if this can be simplified into a rotate first. If that
10088 is unsuccessful, continue in the association code. */
10089 goto bit_rotate;
10091 case BIT_XOR_EXPR:
10092 if (integer_zerop (arg1))
10093 return non_lvalue (fold_convert (type, arg0));
10094 if (integer_all_onesp (arg1))
10095 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10096 if (operand_equal_p (arg0, arg1, 0))
10097 return omit_one_operand (type, integer_zero_node, arg0);
10099 /* ~X ^ X is -1. */
10100 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10101 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10103 t1 = build_int_cst_type (type, -1);
10104 return omit_one_operand (type, t1, arg1);
10107 /* X ^ ~X is -1. */
10108 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10109 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10111 t1 = build_int_cst_type (type, -1);
10112 return omit_one_operand (type, t1, arg0);
10115 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10116 with a constant, and the two constants have no bits in common,
10117 we should treat this as a BIT_IOR_EXPR since this may produce more
10118 simplifications. */
10119 if (TREE_CODE (arg0) == BIT_AND_EXPR
10120 && TREE_CODE (arg1) == BIT_AND_EXPR
10121 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10122 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10123 && integer_zerop (const_binop (BIT_AND_EXPR,
10124 TREE_OPERAND (arg0, 1),
10125 TREE_OPERAND (arg1, 1), 0)))
10127 code = BIT_IOR_EXPR;
10128 goto bit_ior;
10131 /* (X | Y) ^ X -> Y & ~X. */
10132 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10135 tree t2 = TREE_OPERAND (arg0, 1);
10136 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10137 arg1);
10138 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10139 fold_convert (type, t1));
10140 return t1;
10143 /* (Y | X) ^ X -> Y & ~X. */
10144 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10145 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10147 tree t2 = TREE_OPERAND (arg0, 0);
10148 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10149 arg1);
10150 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10151 fold_convert (type, t1));
10152 return t1;
10155 /* X ^ (X | Y) -> Y & ~X. */
10156 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10157 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10159 tree t2 = TREE_OPERAND (arg1, 1);
10160 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10161 arg0);
10162 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10163 fold_convert (type, t1));
10164 return t1;
10167 /* X ^ (Y | X) -> Y & ~X. */
10168 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10169 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10171 tree t2 = TREE_OPERAND (arg1, 0);
10172 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10173 arg0);
10174 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10175 fold_convert (type, t1));
10176 return t1;
10179 /* Convert ~X ^ ~Y to X ^ Y. */
10180 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10181 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10182 return fold_build2 (code, type,
10183 fold_convert (type, TREE_OPERAND (arg0, 0)),
10184 fold_convert (type, TREE_OPERAND (arg1, 0)));
10186 /* Convert ~X ^ C to X ^ ~C. */
10187 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10188 && TREE_CODE (arg1) == INTEGER_CST)
10189 return fold_build2 (code, type,
10190 fold_convert (type, TREE_OPERAND (arg0, 0)),
10191 fold_build1 (BIT_NOT_EXPR, type, arg1));
10193 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10194 if (TREE_CODE (arg0) == BIT_AND_EXPR
10195 && integer_onep (TREE_OPERAND (arg0, 1))
10196 && integer_onep (arg1))
10197 return fold_build2 (EQ_EXPR, type, arg0,
10198 build_int_cst (TREE_TYPE (arg0), 0));
10200 /* Fold (X & Y) ^ Y as ~X & Y. */
10201 if (TREE_CODE (arg0) == BIT_AND_EXPR
10202 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10204 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10205 return fold_build2 (BIT_AND_EXPR, type,
10206 fold_build1 (BIT_NOT_EXPR, type, tem),
10207 fold_convert (type, arg1));
10209 /* Fold (X & Y) ^ X as ~Y & X. */
10210 if (TREE_CODE (arg0) == BIT_AND_EXPR
10211 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10212 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10214 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10215 return fold_build2 (BIT_AND_EXPR, type,
10216 fold_build1 (BIT_NOT_EXPR, type, tem),
10217 fold_convert (type, arg1));
10219 /* Fold X ^ (X & Y) as X & ~Y. */
10220 if (TREE_CODE (arg1) == BIT_AND_EXPR
10221 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10223 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10224 return fold_build2 (BIT_AND_EXPR, type,
10225 fold_convert (type, arg0),
10226 fold_build1 (BIT_NOT_EXPR, type, tem));
10228 /* Fold X ^ (Y & X) as ~Y & X. */
10229 if (TREE_CODE (arg1) == BIT_AND_EXPR
10230 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10231 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10233 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10234 return fold_build2 (BIT_AND_EXPR, type,
10235 fold_build1 (BIT_NOT_EXPR, type, tem),
10236 fold_convert (type, arg0));
10239 /* See if this can be simplified into a rotate first. If that
10240 is unsuccessful, continue in the association code. */
10241 goto bit_rotate;
10243 case BIT_AND_EXPR:
10244 if (integer_all_onesp (arg1))
10245 return non_lvalue (fold_convert (type, arg0));
10246 if (integer_zerop (arg1))
10247 return omit_one_operand (type, arg1, arg0);
10248 if (operand_equal_p (arg0, arg1, 0))
10249 return non_lvalue (fold_convert (type, arg0));
10251 /* ~X & X is always zero. */
10252 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10253 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10254 return omit_one_operand (type, integer_zero_node, arg1);
10256 /* X & ~X is always zero. */
10257 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10258 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10259 return omit_one_operand (type, integer_zero_node, arg0);
10261 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10262 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10263 && TREE_CODE (arg1) == INTEGER_CST
10264 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10265 return fold_build2 (BIT_IOR_EXPR, type,
10266 fold_build2 (BIT_AND_EXPR, type,
10267 TREE_OPERAND (arg0, 0), arg1),
10268 fold_build2 (BIT_AND_EXPR, type,
10269 TREE_OPERAND (arg0, 1), arg1));
10271 /* (X | Y) & Y is (X, Y). */
10272 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10273 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10274 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10275 /* (X | Y) & X is (Y, X). */
10276 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10277 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10278 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10279 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10280 /* X & (X | Y) is (Y, X). */
10281 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10282 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10283 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10284 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10285 /* X & (Y | X) is (Y, X). */
10286 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10287 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10288 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10289 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10291 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10292 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10293 && integer_onep (TREE_OPERAND (arg0, 1))
10294 && integer_onep (arg1))
10296 tem = TREE_OPERAND (arg0, 0);
10297 return fold_build2 (EQ_EXPR, type,
10298 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10299 build_int_cst (TREE_TYPE (tem), 1)),
10300 build_int_cst (TREE_TYPE (tem), 0));
10302 /* Fold ~X & 1 as (X & 1) == 0. */
10303 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10304 && integer_onep (arg1))
10306 tem = TREE_OPERAND (arg0, 0);
10307 return fold_build2 (EQ_EXPR, type,
10308 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10309 build_int_cst (TREE_TYPE (tem), 1)),
10310 build_int_cst (TREE_TYPE (tem), 0));
10313 /* Fold (X ^ Y) & Y as ~X & Y. */
10314 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10315 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10317 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10318 return fold_build2 (BIT_AND_EXPR, type,
10319 fold_build1 (BIT_NOT_EXPR, type, tem),
10320 fold_convert (type, arg1));
10322 /* Fold (X ^ Y) & X as ~Y & X. */
10323 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10324 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10325 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10327 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10328 return fold_build2 (BIT_AND_EXPR, type,
10329 fold_build1 (BIT_NOT_EXPR, type, tem),
10330 fold_convert (type, arg1));
10332 /* Fold X & (X ^ Y) as X & ~Y. */
10333 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10334 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10336 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10337 return fold_build2 (BIT_AND_EXPR, type,
10338 fold_convert (type, arg0),
10339 fold_build1 (BIT_NOT_EXPR, type, tem));
10341 /* Fold X & (Y ^ X) as ~Y & X. */
10342 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10343 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10344 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10346 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10347 return fold_build2 (BIT_AND_EXPR, type,
10348 fold_build1 (BIT_NOT_EXPR, type, tem),
10349 fold_convert (type, arg0));
10352 t1 = distribute_bit_expr (code, type, arg0, arg1);
10353 if (t1 != NULL_TREE)
10354 return t1;
10355 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10356 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10357 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10359 unsigned int prec
10360 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10362 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10363 && (~TREE_INT_CST_LOW (arg1)
10364 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10365 return fold_convert (type, TREE_OPERAND (arg0, 0));
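/* Editorial example (illustrative, not part of the original source):
   with unsigned char c, ((int) c & 0377) folds to (int) c, since the
   widening conversion already zero-extends and leaves nothing outside
   the 8-bit mask (0377 == 0xFF).  */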
10368 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10370 This results in more efficient code for machines without a NOR
10371 instruction. Combine will canonicalize to the first form
10372 which will allow use of NOR instructions provided by the
10373 backend if they exist. */
10374 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10375 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10377 return fold_build1 (BIT_NOT_EXPR, type,
10378 build2 (BIT_IOR_EXPR, type,
10379 TREE_OPERAND (arg0, 0),
10380 TREE_OPERAND (arg1, 0)));
10383 goto associate;
10385 case RDIV_EXPR:
10386 /* Don't touch a floating-point divide by zero unless the mode
10387 of the constant can represent infinity. */
10388 if (TREE_CODE (arg1) == REAL_CST
10389 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10390 && real_zerop (arg1))
10391 return NULL_TREE;
10393 /* Optimize A / A to 1.0 if we don't care about
10394 NaNs or Infinities. Skip the transformation
10395 for non-real operands. */
10396 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10397 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10398 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10399 && operand_equal_p (arg0, arg1, 0))
10401 tree r = build_real (TREE_TYPE (arg0), dconst1);
10403 return omit_two_operands (type, r, arg0, arg1);
10406 /* The complex version of the above A / A optimization. */
10407 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10408 && operand_equal_p (arg0, arg1, 0))
10410 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10411 if (! HONOR_NANS (TYPE_MODE (elem_type))
10412 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10414 tree r = build_real (elem_type, dconst1);
10415 /* omit_two_operands will call fold_convert for us. */
10416 return omit_two_operands (type, r, arg0, arg1);
10420 /* (-A) / (-B) -> A / B */
10421 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10422 return fold_build2 (RDIV_EXPR, type,
10423 TREE_OPERAND (arg0, 0),
10424 negate_expr (arg1));
10425 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10426 return fold_build2 (RDIV_EXPR, type,
10427 negate_expr (arg0),
10428 TREE_OPERAND (arg1, 0));
10430 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10431 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10432 && real_onep (arg1))
10433 return non_lvalue (fold_convert (type, arg0));
10435 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10436 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10437 && real_minus_onep (arg1))
10438 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10440 /* If ARG1 is a constant, we can convert this to a multiply by the
10441 reciprocal. This does not have the same rounding properties,
10442 so only do this if -funsafe-math-optimizations. We can actually
10443 always safely do it if ARG1 is a power of two, but it's hard to
10444 tell if it is or not in a portable manner. */
10445 if (TREE_CODE (arg1) == REAL_CST)
10447 if (flag_unsafe_math_optimizations
10448 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10449 arg1, 0)))
10450 return fold_build2 (MULT_EXPR, type, arg0, tem);
10451 /* Find the reciprocal if optimizing and the result is exact. */
10452 if (optimize)
10454 REAL_VALUE_TYPE r;
10455 r = TREE_REAL_CST (arg1);
10456 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
10458 tem = build_real (type, r);
10459 return fold_build2 (MULT_EXPR, type,
10460 fold_convert (type, arg0), tem);
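/* Editorial example (illustrative, not part of the original source):
   x / 2.0 folds to x * 0.5 whenever optimizing, because the
   reciprocal is exact; x / 3.0 becomes x * (1.0 / 3.0) only under
   -funsafe-math-optimizations, since that reciprocal rounds.  */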
10464 /* Convert A/B/C to A/(B*C). */
10465 if (flag_unsafe_math_optimizations
10466 && TREE_CODE (arg0) == RDIV_EXPR)
10467 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10468 fold_build2 (MULT_EXPR, type,
10469 TREE_OPERAND (arg0, 1), arg1));
10471 /* Convert A/(B/C) to (A/B)*C. */
10472 if (flag_unsafe_math_optimizations
10473 && TREE_CODE (arg1) == RDIV_EXPR)
10474 return fold_build2 (MULT_EXPR, type,
10475 fold_build2 (RDIV_EXPR, type, arg0,
10476 TREE_OPERAND (arg1, 0)),
10477 TREE_OPERAND (arg1, 1));
10479 /* Convert C1/(X*C2) into (C1/C2)/X. */
10480 if (flag_unsafe_math_optimizations
10481 && TREE_CODE (arg1) == MULT_EXPR
10482 && TREE_CODE (arg0) == REAL_CST
10483 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10485 tree tem = const_binop (RDIV_EXPR, arg0,
10486 TREE_OPERAND (arg1, 1), 0);
10487 if (tem)
10488 return fold_build2 (RDIV_EXPR, type, tem,
10489 TREE_OPERAND (arg1, 0));
10492 if (flag_unsafe_math_optimizations)
10494 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10495 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10497 /* Optimize sin(x)/cos(x) as tan(x). */
10498 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10499 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10500 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10501 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10502 CALL_EXPR_ARG (arg1, 0), 0))
10504 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10506 if (tanfn != NULL_TREE)
10507 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10510 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10511 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10512 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10513 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10514 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10515 CALL_EXPR_ARG (arg1, 0), 0))
10517 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10519 if (tanfn != NULL_TREE)
10521 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10522 return fold_build2 (RDIV_EXPR, type,
10523 build_real (type, dconst1), tmp);
10527 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10528 NaNs or Infinities. */
10529 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10530 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10531 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10533 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10534 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10536 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10537 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10538 && operand_equal_p (arg00, arg01, 0))
10540 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10542 if (cosfn != NULL_TREE)
10543 return build_call_expr (cosfn, 1, arg00);
10547 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10548 NaNs or Infinities. */
10549 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10550 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10551 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10553 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10554 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10556 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10557 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10558 && operand_equal_p (arg00, arg01, 0))
10560 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10562 if (cosfn != NULL_TREE)
10564 tree tmp = build_call_expr (cosfn, 1, arg00);
10565 return fold_build2 (RDIV_EXPR, type,
10566 build_real (type, dconst1),
10567 tmp);
10572 /* Optimize pow(x,c)/x as pow(x,c-1). */
10573 if (fcode0 == BUILT_IN_POW
10574 || fcode0 == BUILT_IN_POWF
10575 || fcode0 == BUILT_IN_POWL)
10577 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10578 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10579 if (TREE_CODE (arg01) == REAL_CST
10580 && !TREE_OVERFLOW (arg01)
10581 && operand_equal_p (arg1, arg00, 0))
10583 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10584 REAL_VALUE_TYPE c;
10585 tree arg;
10587 c = TREE_REAL_CST (arg01);
10588 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10589 arg = build_real (type, c);
10590 return build_call_expr (powfn, 2, arg1, arg);
10594 /* Optimize x/expN(y) into x*expN(-y). */
10595 if (BUILTIN_EXPONENT_P (fcode1))
10597 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10598 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10599 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10600 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10603 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10604 if (fcode1 == BUILT_IN_POW
10605 || fcode1 == BUILT_IN_POWF
10606 || fcode1 == BUILT_IN_POWL)
10608 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10609 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10610 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10611 tree neg11 = fold_convert (type, negate_expr (arg11));
10612 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10613 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10616 return NULL_TREE;
10618 case TRUNC_DIV_EXPR:
10619 case FLOOR_DIV_EXPR:
10620 /* Simplify A / (B << N) where A and B are positive and B is
10621 a power of 2, to A >> (N + log2(B)). */
10622 strict_overflow_p = false;
10623 if (TREE_CODE (arg1) == LSHIFT_EXPR
10624 && (TYPE_UNSIGNED (type)
10625 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10627 tree sval = TREE_OPERAND (arg1, 0);
10628 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10630 tree sh_cnt = TREE_OPERAND (arg1, 1);
10631 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10633 if (strict_overflow_p)
10634 fold_overflow_warning (("assuming signed overflow does not "
10635 "occur when simplifying A / (B << N)"),
10636 WARN_STRICT_OVERFLOW_MISC);
10638 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10639 sh_cnt, build_int_cst (NULL_TREE, pow2));
10640 return fold_build2 (RSHIFT_EXPR, type,
10641 fold_convert (type, arg0), sh_cnt);
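/* Editorial example (illustrative, not part of the original source):
   for unsigned a, a / (1 << n) folds to a >> n and a / (4 << n) to
   a >> (n + 2), since log2 (4) == 2.  */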
10644 /* Fall thru */
10646 case ROUND_DIV_EXPR:
10647 case CEIL_DIV_EXPR:
10648 case EXACT_DIV_EXPR:
10649 if (integer_onep (arg1))
10650 return non_lvalue (fold_convert (type, arg0));
10651 if (integer_zerop (arg1))
10652 return NULL_TREE;
10653 /* X / -1 is -X. */
10654 if (!TYPE_UNSIGNED (type)
10655 && TREE_CODE (arg1) == INTEGER_CST
10656 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10657 && TREE_INT_CST_HIGH (arg1) == -1)
10658 return fold_convert (type, negate_expr (arg0));
10660 /* Convert -A / -B to A / B when the type is signed and overflow is
10661 undefined. */
10662 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10663 && TREE_CODE (arg0) == NEGATE_EXPR
10664 && negate_expr_p (arg1))
10666 if (INTEGRAL_TYPE_P (type))
10667 fold_overflow_warning (("assuming signed overflow does not occur "
10668 "when distributing negation across "
10669 "division"),
10670 WARN_STRICT_OVERFLOW_MISC);
10671 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10672 negate_expr (arg1));
10674 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10675 && TREE_CODE (arg1) == NEGATE_EXPR
10676 && negate_expr_p (arg0))
10678 if (INTEGRAL_TYPE_P (type))
10679 fold_overflow_warning (("assuming signed overflow does not occur "
10680 "when distributing negation across "
10681 "division"),
10682 WARN_STRICT_OVERFLOW_MISC);
10683 return fold_build2 (code, type, negate_expr (arg0),
10684 TREE_OPERAND (arg1, 0));
10687 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10688 operation, EXACT_DIV_EXPR.
10690 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10691 At one time others generated faster code, but it's not clear whether they
10692 still do after the last round of changes to the DIV code in expmed.c. */
10693 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10694 && multiple_of_p (type, arg0, arg1))
10695 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10697 strict_overflow_p = false;
10698 if (TREE_CODE (arg1) == INTEGER_CST
10699 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10700 &strict_overflow_p)))
10702 if (strict_overflow_p)
10703 fold_overflow_warning (("assuming signed overflow does not occur "
10704 "when simplifying division"),
10705 WARN_STRICT_OVERFLOW_MISC);
10706 return fold_convert (type, tem);
10709 return NULL_TREE;
10711 case CEIL_MOD_EXPR:
10712 case FLOOR_MOD_EXPR:
10713 case ROUND_MOD_EXPR:
10714 case TRUNC_MOD_EXPR:
10715 /* X % 1 is always zero, but be sure to preserve any side
10716 effects in X. */
10717 if (integer_onep (arg1))
10718 return omit_one_operand (type, integer_zero_node, arg0);
10720 /* X % 0, return X % 0 unchanged so that we can get the
10721 proper warnings and errors. */
10722 if (integer_zerop (arg1))
10723 return NULL_TREE;
10725 /* 0 % X is always zero, but be sure to preserve any side
10726 effects in X. Place this after checking for X == 0. */
10727 if (integer_zerop (arg0))
10728 return omit_one_operand (type, integer_zero_node, arg1);
10730 /* X % -1 is zero. */
10731 if (!TYPE_UNSIGNED (type)
10732 && TREE_CODE (arg1) == INTEGER_CST
10733 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10734 && TREE_INT_CST_HIGH (arg1) == -1)
10735 return omit_one_operand (type, integer_zero_node, arg0);
10737 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10738 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10739 strict_overflow_p = false;
10740 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10741 && (TYPE_UNSIGNED (type)
10742 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10744 tree c = arg1;
10745 /* Also optimize A % (C << N) where C is a power of 2,
10746 to A & ((C << N) - 1). */
10747 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10748 c = TREE_OPERAND (arg1, 0);
10750 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10752 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10753 build_int_cst (TREE_TYPE (arg1), 1));
10754 if (strict_overflow_p)
10755 fold_overflow_warning (("assuming signed overflow does not "
10756 "occur when simplifying "
10757 "X % (power of two)"),
10758 WARN_STRICT_OVERFLOW_MISC);
10759 return fold_build2 (BIT_AND_EXPR, type,
10760 fold_convert (type, arg0),
10761 fold_convert (type, mask));
10765 /* X % -C is the same as X % C. */
10766 if (code == TRUNC_MOD_EXPR
10767 && !TYPE_UNSIGNED (type)
10768 && TREE_CODE (arg1) == INTEGER_CST
10769 && !TREE_OVERFLOW (arg1)
10770 && TREE_INT_CST_HIGH (arg1) < 0
10771 && !TYPE_OVERFLOW_TRAPS (type)
10772 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10773 && !sign_bit_p (arg1, arg1))
10774 return fold_build2 (code, type, fold_convert (type, arg0),
10775 fold_convert (type, negate_expr (arg1)));
10777 /* X % -Y is the same as X % Y. */
10778 if (code == TRUNC_MOD_EXPR
10779 && !TYPE_UNSIGNED (type)
10780 && TREE_CODE (arg1) == NEGATE_EXPR
10781 && !TYPE_OVERFLOW_TRAPS (type))
10782 return fold_build2 (code, type, fold_convert (type, arg0),
10783 fold_convert (type, TREE_OPERAND (arg1, 0)));
10785 if (TREE_CODE (arg1) == INTEGER_CST
10786 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10787 &strict_overflow_p)))
10789 if (strict_overflow_p)
10790 fold_overflow_warning (("assuming signed overflow does not occur "
10791 "when simplifying modulos"),
10792 WARN_STRICT_OVERFLOW_MISC);
10793 return fold_convert (type, tem);
10796 return NULL_TREE;
10798 case LROTATE_EXPR:
10799 case RROTATE_EXPR:
10800 if (integer_all_onesp (arg0))
10801 return omit_one_operand (type, arg0, arg1);
10802 goto shift;
10804 case RSHIFT_EXPR:
10805 /* Optimize -1 >> x for arithmetic right shifts. */
10806 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10807 return omit_one_operand (type, arg0, arg1);
10808 /* ... fall through ... */
10810 case LSHIFT_EXPR:
10811 shift:
10812 if (integer_zerop (arg1))
10813 return non_lvalue (fold_convert (type, arg0));
10814 if (integer_zerop (arg0))
10815 return omit_one_operand (type, arg0, arg1);
10817 /* Since a negative shift count is not well-defined,
10818 don't try to compute it in the compiler. */
10819 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10820 return NULL_TREE;
10822 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
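/* E.g. (X << 3) << 5 becomes X << 8 when 8 is within the
   precision of the type.  */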
10823 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10824 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10825 && host_integerp (TREE_OPERAND (arg0, 1), false)
10826 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10828 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10829 + TREE_INT_CST_LOW (arg1));
10831 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10832 being well defined. */
10833 if (low >= TYPE_PRECISION (type))
10835 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10836 low = low % TYPE_PRECISION (type);
10837 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10838 return build_int_cst (type, 0);
10839 else
10840 low = TYPE_PRECISION (type) - 1;
10843 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10844 build_int_cst (type, low));
10847 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10848 into x & ((unsigned)-1 >> c) for unsigned types. */
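/* E.g. in a 32-bit type, (X >> 4) << 4 becomes X & 0xfffffff0, and
   for unsigned X, (X << 4) >> 4 becomes X & 0x0fffffff.  */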
10849 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10850 || (TYPE_UNSIGNED (type)
10851 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10852 && host_integerp (arg1, false)
10853 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10854 && host_integerp (TREE_OPERAND (arg0, 1), false)
10855 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10857 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10858 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10859 tree lshift;
10860 tree arg00;
10862 if (low0 == low1)
10864 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10866 lshift = build_int_cst (type, -1);
10867 lshift = int_const_binop (code, lshift, arg1, 0);
10869 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10873 /* Rewrite an LROTATE_EXPR by a constant into an
10874 RROTATE_EXPR by a new constant. */
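/* E.g. rotating a 32-bit value left by 8 is the same as rotating
   it right by 24.  */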
10875 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10877 tree tem = build_int_cst (TREE_TYPE (arg1),
10878 GET_MODE_BITSIZE (TYPE_MODE (type)));
10879 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10880 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10883 /* If we have a rotate of a bit operation with the rotate count and
10884 the second operand of the bit operation both constant,
10885 permute the two operations. */
10886 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10887 && (TREE_CODE (arg0) == BIT_AND_EXPR
10888 || TREE_CODE (arg0) == BIT_IOR_EXPR
10889 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10890 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10891 return fold_build2 (TREE_CODE (arg0), type,
10892 fold_build2 (code, type,
10893 TREE_OPERAND (arg0, 0), arg1),
10894 fold_build2 (code, type,
10895 TREE_OPERAND (arg0, 1), arg1));
10897 /* Two consecutive rotates adding up to the width of the mode can
10898 be ignored. */
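/* E.g. in a 32-bit type, rotating X right by 8 and then right by
   24 yields X again.  */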
10899 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10900 && TREE_CODE (arg0) == RROTATE_EXPR
10901 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10902 && TREE_INT_CST_HIGH (arg1) == 0
10903 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10904 && ((TREE_INT_CST_LOW (arg1)
10905 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10906 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10907 return TREE_OPERAND (arg0, 0);
10909 return NULL_TREE;
10911 case MIN_EXPR:
10912 if (operand_equal_p (arg0, arg1, 0))
10913 return omit_one_operand (type, arg0, arg1);
10914 if (INTEGRAL_TYPE_P (type)
10915 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10916 return omit_one_operand (type, arg1, arg0);
10917 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10918 if (tem)
10919 return tem;
10920 goto associate;
10922 case MAX_EXPR:
10923 if (operand_equal_p (arg0, arg1, 0))
10924 return omit_one_operand (type, arg0, arg1);
10925 if (INTEGRAL_TYPE_P (type)
10926 && TYPE_MAX_VALUE (type)
10927 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10928 return omit_one_operand (type, arg1, arg0);
10929 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10930 if (tem)
10931 return tem;
10932 goto associate;
10934 case TRUTH_ANDIF_EXPR:
10935 /* Note that the operands of this must be ints
10936 and their values must be 0 or 1.
10937 ("true" is a fixed value perhaps depending on the language.) */
10938 /* If first arg is constant zero, return it. */
10939 if (integer_zerop (arg0))
10940 return fold_convert (type, arg0);
10941 case TRUTH_AND_EXPR:
10942 /* If either arg is constant true, drop it. */
10943 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10944 return non_lvalue (fold_convert (type, arg1));
10945 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10946 /* Preserve sequence points. */
10947 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10948 return non_lvalue (fold_convert (type, arg0));
10949 /* If second arg is constant zero, result is zero, but first arg
10950 must be evaluated. */
10951 if (integer_zerop (arg1))
10952 return omit_one_operand (type, arg1, arg0);
10953 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10954 case will be handled here. */
10955 if (integer_zerop (arg0))
10956 return omit_one_operand (type, arg0, arg1);
10958 /* !X && X is always false. */
10959 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10960 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10961 return omit_one_operand (type, integer_zero_node, arg1);
10962 /* X && !X is always false. */
10963 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10964 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10965 return omit_one_operand (type, integer_zero_node, arg0);
10967 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10968 means A >= Y && A != MAX, but in this case we know that
10969 A < X <= MAX. */
10971 if (!TREE_SIDE_EFFECTS (arg0)
10972 && !TREE_SIDE_EFFECTS (arg1))
10974 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10975 if (tem && !operand_equal_p (tem, arg0, 0))
10976 return fold_build2 (code, type, tem, arg1);
10978 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10979 if (tem && !operand_equal_p (tem, arg1, 0))
10980 return fold_build2 (code, type, arg0, tem);
10983 truth_andor:
10984 /* We only do these simplifications if we are optimizing. */
10985 if (!optimize)
10986 return NULL_TREE;
10988 /* Check for things like (A || B) && (A || C). We can convert this
10989 to A || (B && C). Note that either operator can be any of the four
10990 truth and/or operations and the transformation will still be
10991 valid. Also note that we only care about order for the
10992 ANDIF and ORIF operators. If B contains side effects, this
10993 might change the truth-value of A. */
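/* E.g. (A || B) && (A || C) becomes A || (B && C), and
   (A && B) || (A && C) becomes A && (B || C).  */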
10994 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10995 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10996 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10997 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10998 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10999 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11001 tree a00 = TREE_OPERAND (arg0, 0);
11002 tree a01 = TREE_OPERAND (arg0, 1);
11003 tree a10 = TREE_OPERAND (arg1, 0);
11004 tree a11 = TREE_OPERAND (arg1, 1);
11005 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11006 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11007 && (code == TRUTH_AND_EXPR
11008 || code == TRUTH_OR_EXPR));
11010 if (operand_equal_p (a00, a10, 0))
11011 return fold_build2 (TREE_CODE (arg0), type, a00,
11012 fold_build2 (code, type, a01, a11));
11013 else if (commutative && operand_equal_p (a00, a11, 0))
11014 return fold_build2 (TREE_CODE (arg0), type, a00,
11015 fold_build2 (code, type, a01, a10));
11016 else if (commutative && operand_equal_p (a01, a10, 0))
11017 return fold_build2 (TREE_CODE (arg0), type, a01,
11018 fold_build2 (code, type, a00, a11));
11020 /* This case is tricky because we must either have commutative
11021 operators or else A10 must not have side-effects. */
11023 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11024 && operand_equal_p (a01, a11, 0))
11025 return fold_build2 (TREE_CODE (arg0), type,
11026 fold_build2 (code, type, a00, a10),
11027 a01);
11030 /* See if we can build a range comparison. */
11031 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11032 return tem;
11034 /* Check for the possibility of merging component references. If our
11035 lhs is another similar operation, try to merge its rhs with our
11036 rhs. Then try to merge our lhs and rhs. */
11037 if (TREE_CODE (arg0) == code
11038 && 0 != (tem = fold_truthop (code, type,
11039 TREE_OPERAND (arg0, 1), arg1)))
11040 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11042 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11043 return tem;
11045 return NULL_TREE;
11047 case TRUTH_ORIF_EXPR:
11048 /* Note that the operands of this must be ints
11049 and their values must be 0 or true.
11050 ("true" is a fixed value perhaps depending on the language.) */
11051 /* If first arg is constant true, return it. */
11052 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11053 return fold_convert (type, arg0);
11054 case TRUTH_OR_EXPR:
11055 /* If either arg is constant zero, drop it. */
11056 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11057 return non_lvalue (fold_convert (type, arg1));
11058 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11059 /* Preserve sequence points. */
11060 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11061 return non_lvalue (fold_convert (type, arg0));
11062 /* If second arg is constant true, result is true, but we must
11063 evaluate first arg. */
11064 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11065 return omit_one_operand (type, arg1, arg0);
11066 /* Likewise for first arg, but note this only occurs here for
11067 TRUTH_OR_EXPR. */
11068 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11069 return omit_one_operand (type, arg0, arg1);
11071 /* !X || X is always true. */
11072 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11073 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11074 return omit_one_operand (type, integer_one_node, arg1);
11075 /* X || !X is always true. */
11076 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11077 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11078 return omit_one_operand (type, integer_one_node, arg0);
11080 goto truth_andor;
11082 case TRUTH_XOR_EXPR:
11083 /* If the second arg is constant zero, drop it. */
11084 if (integer_zerop (arg1))
11085 return non_lvalue (fold_convert (type, arg0));
11086 /* If the second arg is constant true, this is a logical inversion. */
11087 if (integer_onep (arg1))
11089 /* Only call invert_truthvalue if operand is a truth value. */
11090 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11091 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11092 else
11093 tem = invert_truthvalue (arg0);
11094 return non_lvalue (fold_convert (type, tem));
11096 /* Identical arguments cancel to zero. */
11097 if (operand_equal_p (arg0, arg1, 0))
11098 return omit_one_operand (type, integer_zero_node, arg0);
11100 /* !X ^ X is always true. */
11101 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11102 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11103 return omit_one_operand (type, integer_one_node, arg1);
11105 /* X ^ !X is always true. */
11106 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11107 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11108 return omit_one_operand (type, integer_one_node, arg0);
11110 return NULL_TREE;
11112 case EQ_EXPR:
11113 case NE_EXPR:
11114 tem = fold_comparison (code, type, op0, op1);
11115 if (tem != NULL_TREE)
11116 return tem;
11118 /* bool_var != 0 becomes bool_var. */
11119 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11120 && code == NE_EXPR)
11121 return non_lvalue (fold_convert (type, arg0));
11123 /* bool_var == 1 becomes bool_var. */
11124 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11125 && code == EQ_EXPR)
11126 return non_lvalue (fold_convert (type, arg0));
11128 /* bool_var != 1 becomes !bool_var. */
11129 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11130 && code == NE_EXPR)
11131 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11133 /* bool_var == 0 becomes !bool_var. */
11134 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11135 && code == EQ_EXPR)
11136 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11138 /* If this is an equality comparison of the address of two non-weak,
11139 unaliased symbols neither of which are extern (since we do not
11140 have access to attributes for externs), then we know the result. */
11141 if (TREE_CODE (arg0) == ADDR_EXPR
11142 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11143 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11144 && ! lookup_attribute ("alias",
11145 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11146 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11147 && TREE_CODE (arg1) == ADDR_EXPR
11148 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11149 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11150 && ! lookup_attribute ("alias",
11151 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11152 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11154 /* We know that we're looking at the address of two
11155 non-weak, unaliased, static _DECL nodes.
11157 It is both wasteful and incorrect to call operand_equal_p
11158 to compare the two ADDR_EXPR nodes. It is wasteful in that
11159 all we need to do is test pointer equality for the arguments
11160 to the two ADDR_EXPR nodes. It is incorrect to use
11161 operand_equal_p as that function is NOT equivalent to a
11162 C equality test. It can in fact return false for two
11163 objects which would test as equal using the C equality
11164 operator. */
11165 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11166 return constant_boolean_node (equal
11167 ? code == EQ_EXPR : code != EQ_EXPR,
11168 type);
11171 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11172 a MINUS_EXPR of a constant, we can convert it into a comparison with
11173 a revised constant as long as no overflow occurs. */
11174 if (TREE_CODE (arg1) == INTEGER_CST
11175 && (TREE_CODE (arg0) == PLUS_EXPR
11176 || TREE_CODE (arg0) == MINUS_EXPR)
11177 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11178 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11179 ? MINUS_EXPR : PLUS_EXPR,
11180 fold_convert (TREE_TYPE (arg0), arg1),
11181 TREE_OPERAND (arg0, 1), 0))
11182 && !TREE_OVERFLOW (tem))
11183 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11185 /* Similarly for a NEGATE_EXPR. */
11186 if (TREE_CODE (arg0) == NEGATE_EXPR
11187 && TREE_CODE (arg1) == INTEGER_CST
11188 && 0 != (tem = negate_expr (arg1))
11189 && TREE_CODE (tem) == INTEGER_CST
11190 && !TREE_OVERFLOW (tem))
11191 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11193 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
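/* E.g. X ^ 3 == 5 becomes X == 6, since 3 ^ 5 == 6.  */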
11194 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11195 && TREE_CODE (arg1) == INTEGER_CST
11196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11197 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11198 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11199 fold_convert (TREE_TYPE (arg0), arg1),
11200 TREE_OPERAND (arg0, 1)));
11202 /* Transform comparisons of the form X +- C CMP X. */
11203 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11204 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11205 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11206 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11207 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11209 tree cst = TREE_OPERAND (arg0, 1);
11211 if (code == EQ_EXPR
11212 && !integer_zerop (cst))
11213 return omit_two_operands (type, boolean_false_node,
11214 TREE_OPERAND (arg0, 0), arg1);
11215 else
11216 return omit_two_operands (type, boolean_true_node,
11217 TREE_OPERAND (arg0, 0), arg1);
11220 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11221 for !=. Don't do this for ordered comparisons due to overflow. */
11222 if (TREE_CODE (arg0) == MINUS_EXPR
11223 && integer_zerop (arg1))
11224 return fold_build2 (code, type,
11225 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11227 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11228 if (TREE_CODE (arg0) == ABS_EXPR
11229 && (integer_zerop (arg1) || real_zerop (arg1)))
11230 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11232 /* If this is an EQ or NE comparison with zero and ARG0 is
11233 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11234 two operations, but the latter can be done in one less insn
11235 on machines that have only two-operand insns or on which a
11236 constant cannot be the first operand. */
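/* E.g. ((1 << N) & X) == 0 becomes ((X >> N) & 1) == 0.  */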
11237 if (TREE_CODE (arg0) == BIT_AND_EXPR
11238 && integer_zerop (arg1))
11240 tree arg00 = TREE_OPERAND (arg0, 0);
11241 tree arg01 = TREE_OPERAND (arg0, 1);
11242 if (TREE_CODE (arg00) == LSHIFT_EXPR
11243 && integer_onep (TREE_OPERAND (arg00, 0)))
11244 return
11245 fold_build2 (code, type,
11246 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11247 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11248 arg01, TREE_OPERAND (arg00, 1)),
11249 fold_convert (TREE_TYPE (arg0),
11250 integer_one_node)),
11251 arg1);
11252 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11253 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11254 return
11255 fold_build2 (code, type,
11256 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11257 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11258 arg00, TREE_OPERAND (arg01, 1)),
11259 fold_convert (TREE_TYPE (arg0),
11260 integer_one_node)),
11261 arg1);
11264 /* If this is an NE or EQ comparison of zero against the result of a
11265 signed MOD operation whose second operand is a power of 2, make
11266 the MOD operation unsigned since it is simpler and equivalent. */
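/* E.g. for int X, X % 16 == 0 becomes (unsigned) X % 16 == 0;
   divisibility by a power of two is unaffected by the sign.  */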
11267 if (integer_zerop (arg1)
11268 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11269 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11270 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11271 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11272 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11273 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11275 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11276 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11277 fold_convert (newtype,
11278 TREE_OPERAND (arg0, 0)),
11279 fold_convert (newtype,
11280 TREE_OPERAND (arg0, 1)));
11282 return fold_build2 (code, type, newmod,
11283 fold_convert (newtype, arg1));
11286 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11287 C1 is a valid shift constant, and C2 is a power of two, i.e.
11288 a single bit. */
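/* E.g. ((X >> 3) & 4) != 0 becomes (X & 32) != 0, since
   4 << 3 == 32 does not overflow.  */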
11289 if (TREE_CODE (arg0) == BIT_AND_EXPR
11290 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11291 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11292 == INTEGER_CST
11293 && integer_pow2p (TREE_OPERAND (arg0, 1))
11294 && integer_zerop (arg1))
11296 tree itype = TREE_TYPE (arg0);
11297 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11298 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11300 /* Check for a valid shift count. */
11301 if (TREE_INT_CST_HIGH (arg001) == 0
11302 && TREE_INT_CST_LOW (arg001) < prec)
11304 tree arg01 = TREE_OPERAND (arg0, 1);
11305 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11306 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11307 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11308 can be rewritten as (X & (C2 << C1)) != 0. */
11309 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11311 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11312 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11313 return fold_build2 (code, type, tem, arg1);
11315 /* Otherwise, for signed (arithmetic) shifts,
11316 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11317 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11318 else if (!TYPE_UNSIGNED (itype))
11319 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11320 arg000, build_int_cst (itype, 0));
11321 /* Otherwise, for unsigned (logical) shifts,
11322 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11323 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11324 else
11325 return omit_one_operand (type,
11326 code == EQ_EXPR ? integer_one_node
11327 : integer_zero_node,
11328 arg000);
11332 /* If this is an NE comparison of zero with an AND of one, remove the
11333 comparison since the AND will give the correct value. */
11334 if (code == NE_EXPR
11335 && integer_zerop (arg1)
11336 && TREE_CODE (arg0) == BIT_AND_EXPR
11337 && integer_onep (TREE_OPERAND (arg0, 1)))
11338 return fold_convert (type, arg0);
11340 /* If we have (A & C) == C where C is a power of 2, convert this into
11341 (A & C) != 0. Similarly for NE_EXPR. */
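/* E.g. (X & 8) == 8 becomes (X & 8) != 0.  */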
11342 if (TREE_CODE (arg0) == BIT_AND_EXPR
11343 && integer_pow2p (TREE_OPERAND (arg0, 1))
11344 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11345 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11346 arg0, fold_convert (TREE_TYPE (arg0),
11347 integer_zero_node));
11349 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11350 bit, then fold the expression into A < 0 or A >= 0. */
11351 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11352 if (tem)
11353 return tem;
11355 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11356 Similarly for NE_EXPR. */
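/* E.g. (X & 4) == 3 is always false, since 3 & ~4 != 0.  */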
11357 if (TREE_CODE (arg0) == BIT_AND_EXPR
11358 && TREE_CODE (arg1) == INTEGER_CST
11359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11361 tree notc = fold_build1 (BIT_NOT_EXPR,
11362 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11363 TREE_OPERAND (arg0, 1));
11364 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11365 arg1, notc);
11366 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11367 if (integer_nonzerop (dandnotc))
11368 return omit_one_operand (type, rslt, arg0);
11371 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11372 Similarly for NE_EXPR. */
11373 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11374 && TREE_CODE (arg1) == INTEGER_CST
11375 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11377 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11378 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11379 TREE_OPERAND (arg0, 1), notd);
11380 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11381 if (integer_nonzerop (candnotd))
11382 return omit_one_operand (type, rslt, arg0);
11385 /* If this is a comparison of a field, we may be able to simplify it. */
11386 if ((TREE_CODE (arg0) == COMPONENT_REF
11387 || TREE_CODE (arg0) == BIT_FIELD_REF)
11388 /* Handle the constant case even without -O
11389 to make sure the warnings are given. */
11390 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11392 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11393 if (t1)
11394 return t1;
11397 /* Optimize comparisons of strlen vs zero to a compare of the
11398 first character of the string vs zero. To wit,
11399 strlen(ptr) == 0 => *ptr == 0
11400 strlen(ptr) != 0 => *ptr != 0
11401 Other cases should reduce to one of these two (or a constant)
11402 due to the return value of strlen being unsigned. */
11403 if (TREE_CODE (arg0) == CALL_EXPR
11404 && integer_zerop (arg1))
11406 tree fndecl = get_callee_fndecl (arg0);
11408 if (fndecl
11409 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11410 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11411 && call_expr_nargs (arg0) == 1
11412 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11414 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11415 return fold_build2 (code, type, iref,
11416 build_int_cst (TREE_TYPE (iref), 0));
11420 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11421 of X. Similarly fold (X >> C) == 0 into X >= 0. */
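/* E.g. for 32-bit int X, (X >> 31) != 0 becomes X < 0.  */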
11422 if (TREE_CODE (arg0) == RSHIFT_EXPR
11423 && integer_zerop (arg1)
11424 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11426 tree arg00 = TREE_OPERAND (arg0, 0);
11427 tree arg01 = TREE_OPERAND (arg0, 1);
11428 tree itype = TREE_TYPE (arg00);
11429 if (TREE_INT_CST_HIGH (arg01) == 0
11430 && TREE_INT_CST_LOW (arg01)
11431 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11433 if (TYPE_UNSIGNED (itype))
11435 itype = lang_hooks.types.signed_type (itype);
11436 arg00 = fold_convert (itype, arg00);
11438 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11439 type, arg00, build_int_cst (itype, 0));
11443 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11444 if (integer_zerop (arg1)
11445 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11446 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11447 TREE_OPERAND (arg0, 1));
11449 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11450 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11451 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11452 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11453 build_int_cst (TREE_TYPE (arg1), 0));
11454 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11455 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11456 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11457 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11458 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11459 build_int_cst (TREE_TYPE (arg1), 0));
11461 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11462 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11463 && TREE_CODE (arg1) == INTEGER_CST
11464 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11465 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11466 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11467 TREE_OPERAND (arg0, 1), arg1));
11469 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11470 (X & C) == 0 when C is a single bit. */
11471 if (TREE_CODE (arg0) == BIT_AND_EXPR
11472 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11473 && integer_zerop (arg1)
11474 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11476 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11477 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11478 TREE_OPERAND (arg0, 1));
11479 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11480 type, tem, arg1);
11483 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11484 constant C is a power of two, i.e. a single bit. */
11485 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11486 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11487 && integer_zerop (arg1)
11488 && integer_pow2p (TREE_OPERAND (arg0, 1))
11489 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11490 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11492 tree arg00 = TREE_OPERAND (arg0, 0);
11493 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11494 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11497 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11498 when C is a power of two, i.e. a single bit. */
11499 if (TREE_CODE (arg0) == BIT_AND_EXPR
11500 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11501 && integer_zerop (arg1)
11502 && integer_pow2p (TREE_OPERAND (arg0, 1))
11503 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11504 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11506 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11507 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11508 arg000, TREE_OPERAND (arg0, 1));
11509 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11510 tem, build_int_cst (TREE_TYPE (tem), 0));
11513 if (integer_zerop (arg1)
11514 && tree_expr_nonzero_p (arg0))
11516 tree res = constant_boolean_node (code == NE_EXPR, type);
11517 return omit_one_operand (type, res, arg0);
11520 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11521 if (TREE_CODE (arg0) == NEGATE_EXPR
11522 && TREE_CODE (arg1) == NEGATE_EXPR)
11523 return fold_build2 (code, type,
11524 TREE_OPERAND (arg0, 0),
11525 TREE_OPERAND (arg1, 0));
11527 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11528 if (TREE_CODE (arg0) == BIT_AND_EXPR
11529 && TREE_CODE (arg1) == BIT_AND_EXPR)
11531 tree arg00 = TREE_OPERAND (arg0, 0);
11532 tree arg01 = TREE_OPERAND (arg0, 1);
11533 tree arg10 = TREE_OPERAND (arg1, 0);
11534 tree arg11 = TREE_OPERAND (arg1, 1);
11535 tree itype = TREE_TYPE (arg0);
11537 if (operand_equal_p (arg01, arg11, 0))
11538 return fold_build2 (code, type,
11539 fold_build2 (BIT_AND_EXPR, itype,
11540 fold_build2 (BIT_XOR_EXPR, itype,
11541 arg00, arg10),
11542 arg01),
11543 build_int_cst (itype, 0));
11545 if (operand_equal_p (arg01, arg10, 0))
11546 return fold_build2 (code, type,
11547 fold_build2 (BIT_AND_EXPR, itype,
11548 fold_build2 (BIT_XOR_EXPR, itype,
11549 arg00, arg11),
11550 arg01),
11551 build_int_cst (itype, 0));
11553 if (operand_equal_p (arg00, arg11, 0))
11554 return fold_build2 (code, type,
11555 fold_build2 (BIT_AND_EXPR, itype,
11556 fold_build2 (BIT_XOR_EXPR, itype,
11557 arg01, arg10),
11558 arg00),
11559 build_int_cst (itype, 0));
11561 if (operand_equal_p (arg00, arg10, 0))
11562 return fold_build2 (code, type,
11563 fold_build2 (BIT_AND_EXPR, itype,
11564 fold_build2 (BIT_XOR_EXPR, itype,
11565 arg01, arg11),
11566 arg00),
11567 build_int_cst (itype, 0));
11570 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11571 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11573 tree arg00 = TREE_OPERAND (arg0, 0);
11574 tree arg01 = TREE_OPERAND (arg0, 1);
11575 tree arg10 = TREE_OPERAND (arg1, 0);
11576 tree arg11 = TREE_OPERAND (arg1, 1);
11577 tree itype = TREE_TYPE (arg0);
11579 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11580 operand_equal_p guarantees no side-effects so we don't need
11581 to use omit_one_operand on Z. */
11582 if (operand_equal_p (arg01, arg11, 0))
11583 return fold_build2 (code, type, arg00, arg10);
11584 if (operand_equal_p (arg01, arg10, 0))
11585 return fold_build2 (code, type, arg00, arg11);
11586 if (operand_equal_p (arg00, arg11, 0))
11587 return fold_build2 (code, type, arg01, arg10);
11588 if (operand_equal_p (arg00, arg10, 0))
11589 return fold_build2 (code, type, arg01, arg11);
11591 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11592 if (TREE_CODE (arg01) == INTEGER_CST
11593 && TREE_CODE (arg11) == INTEGER_CST)
11594 return fold_build2 (code, type,
11595 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11596 fold_build2 (BIT_XOR_EXPR, itype,
11597 arg01, arg11)),
11598 arg10);
11601 /* Attempt to simplify equality/inequality comparisons of complex
11602 values. Only lower the comparison if the result is known or
11603 can be simplified to a single scalar comparison. */
11604 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11605 || TREE_CODE (arg0) == COMPLEX_CST)
11606 && (TREE_CODE (arg1) == COMPLEX_EXPR
11607 || TREE_CODE (arg1) == COMPLEX_CST))
11609 tree real0, imag0, real1, imag1;
11610 tree rcond, icond;
11612 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11614 real0 = TREE_OPERAND (arg0, 0);
11615 imag0 = TREE_OPERAND (arg0, 1);
11617 else
11619 real0 = TREE_REALPART (arg0);
11620 imag0 = TREE_IMAGPART (arg0);
11623 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11625 real1 = TREE_OPERAND (arg1, 0);
11626 imag1 = TREE_OPERAND (arg1, 1);
11628 else
11630 real1 = TREE_REALPART (arg1);
11631 imag1 = TREE_IMAGPART (arg1);
11634 rcond = fold_binary (code, type, real0, real1);
11635 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11637 if (integer_zerop (rcond))
11639 if (code == EQ_EXPR)
11640 return omit_two_operands (type, boolean_false_node,
11641 imag0, imag1);
11642 return fold_build2 (NE_EXPR, type, imag0, imag1);
11644 else
11646 if (code == NE_EXPR)
11647 return omit_two_operands (type, boolean_true_node,
11648 imag0, imag1);
11649 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11653 icond = fold_binary (code, type, imag0, imag1);
11654 if (icond && TREE_CODE (icond) == INTEGER_CST)
11656 if (integer_zerop (icond))
11658 if (code == EQ_EXPR)
11659 return omit_two_operands (type, boolean_false_node,
11660 real0, real1);
11661 return fold_build2 (NE_EXPR, type, real0, real1);
11663 else
11665 if (code == NE_EXPR)
11666 return omit_two_operands (type, boolean_true_node,
11667 real0, real1);
11668 return fold_build2 (EQ_EXPR, type, real0, real1);
11673 return NULL_TREE;
11675 case LT_EXPR:
11676 case GT_EXPR:
11677 case LE_EXPR:
11678 case GE_EXPR:
11679 tem = fold_comparison (code, type, op0, op1);
11680 if (tem != NULL_TREE)
11681 return tem;
11683 /* Transform comparisons of the form X +- C CMP X. */
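/* E.g. when signed overflow is undefined, X + 1 > X folds to true
   and X - 1 > X folds to false.  */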
11684 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11685 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11686 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11687 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11688 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11689 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11691 tree arg01 = TREE_OPERAND (arg0, 1);
11692 enum tree_code code0 = TREE_CODE (arg0);
11693 int is_positive;
11695 if (TREE_CODE (arg01) == REAL_CST)
11696 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11697 else
11698 is_positive = tree_int_cst_sgn (arg01);
11700 /* (X - c) > X becomes false. */
11701 if (code == GT_EXPR
11702 && ((code0 == MINUS_EXPR && is_positive >= 0)
11703 || (code0 == PLUS_EXPR && is_positive <= 0)))
11705 if (TREE_CODE (arg01) == INTEGER_CST
11706 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11707 fold_overflow_warning (("assuming signed overflow does not "
11708 "occur when assuming that (X - c) > X "
11709 "is always false"),
11710 WARN_STRICT_OVERFLOW_ALL);
11711 return constant_boolean_node (0, type);
11714 /* Likewise (X + c) < X becomes false. */
11715 if (code == LT_EXPR
11716 && ((code0 == PLUS_EXPR && is_positive >= 0)
11717 || (code0 == MINUS_EXPR && is_positive <= 0)))
11719 if (TREE_CODE (arg01) == INTEGER_CST
11720 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11721 fold_overflow_warning (("assuming signed overflow does not "
11722 "occur when assuming that "
11723 "(X + c) < X is always false"),
11724 WARN_STRICT_OVERFLOW_ALL);
11725 return constant_boolean_node (0, type);
11728 /* Convert (X - c) <= X to true. */
11729 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11730 && code == LE_EXPR
11731 && ((code0 == MINUS_EXPR && is_positive >= 0)
11732 || (code0 == PLUS_EXPR && is_positive <= 0)))
11734 if (TREE_CODE (arg01) == INTEGER_CST
11735 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11736 fold_overflow_warning (("assuming signed overflow does not "
11737 "occur when assuming that "
11738 "(X - c) <= X is always true"),
11739 WARN_STRICT_OVERFLOW_ALL);
11740 return constant_boolean_node (1, type);
11743 /* Convert (X + c) >= X to true. */
11744 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11745 && code == GE_EXPR
11746 && ((code0 == PLUS_EXPR && is_positive >= 0)
11747 || (code0 == MINUS_EXPR && is_positive <= 0)))
11749 if (TREE_CODE (arg01) == INTEGER_CST
11750 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11751 fold_overflow_warning (("assuming signed overflow does not "
11752 "occur when assuming that "
11753 "(X + c) >= X is always true"),
11754 WARN_STRICT_OVERFLOW_ALL);
11755 return constant_boolean_node (1, type);
11758 if (TREE_CODE (arg01) == INTEGER_CST)
11760 /* Convert X + c > X and X - c < X to true for integers. */
11761 if (code == GT_EXPR
11762 && ((code0 == PLUS_EXPR && is_positive > 0)
11763 || (code0 == MINUS_EXPR && is_positive < 0)))
11765 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11766 fold_overflow_warning (("assuming signed overflow does "
11767 "not occur when assuming that "
11768 "(X + c) > X is always true"),
11769 WARN_STRICT_OVERFLOW_ALL);
11770 return constant_boolean_node (1, type);
11773 if (code == LT_EXPR
11774 && ((code0 == MINUS_EXPR && is_positive > 0)
11775 || (code0 == PLUS_EXPR && is_positive < 0)))
11777 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11778 fold_overflow_warning (("assuming signed overflow does "
11779 "not occur when assuming that "
11780 "(X - c) < X is always true"),
11781 WARN_STRICT_OVERFLOW_ALL);
11782 return constant_boolean_node (1, type);
11785 /* Convert X + c <= X and X - c >= X to false for integers. */
11786 if (code == LE_EXPR
11787 && ((code0 == PLUS_EXPR && is_positive > 0)
11788 || (code0 == MINUS_EXPR && is_positive < 0)))
11790 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11791 fold_overflow_warning (("assuming signed overflow does "
11792 "not occur when assuming that "
11793 "(X + c) <= X is always false"),
11794 WARN_STRICT_OVERFLOW_ALL);
11795 return constant_boolean_node (0, type);
11798 if (code == GE_EXPR
11799 && ((code0 == MINUS_EXPR && is_positive > 0)
11800 || (code0 == PLUS_EXPR && is_positive < 0)))
11802 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11803 fold_overflow_warning (("assuming signed overflow does "
11804 "not occur when assuming that "
11805 "(X - c) >= X is always true"),
11806 WARN_STRICT_OVERFLOW_ALL);
11807 return constant_boolean_node (0, type);
11812 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11813 This transformation affects the cases which are handled in later
11814 optimizations involving comparisons with non-negative constants. */
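/* E.g. X >= 5 becomes X > 4 and X < 5 becomes X <= 4.  */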
11815 if (TREE_CODE (arg1) == INTEGER_CST
11816 && TREE_CODE (arg0) != INTEGER_CST
11817 && tree_int_cst_sgn (arg1) > 0)
11819 if (code == GE_EXPR)
11821 arg1 = const_binop (MINUS_EXPR, arg1,
11822 build_int_cst (TREE_TYPE (arg1), 1), 0);
11823 return fold_build2 (GT_EXPR, type, arg0,
11824 fold_convert (TREE_TYPE (arg0), arg1));
11826 if (code == LT_EXPR)
11828 arg1 = const_binop (MINUS_EXPR, arg1,
11829 build_int_cst (TREE_TYPE (arg1), 1), 0);
11830 return fold_build2 (LE_EXPR, type, arg0,
11831 fold_convert (TREE_TYPE (arg0), arg1));
11835 /* Comparisons with the highest or lowest possible integer of
11836 the specified precision will have known values. */
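/* E.g. for unsigned char X, X > 255 folds to false and
   X <= 255 folds to true.  */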
11838 tree arg1_type = TREE_TYPE (arg1);
11839 unsigned int width = TYPE_PRECISION (arg1_type);
11841 if (TREE_CODE (arg1) == INTEGER_CST
11842 && !TREE_OVERFLOW (arg1)
11843 && width <= 2 * HOST_BITS_PER_WIDE_INT
11844 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11846 HOST_WIDE_INT signed_max_hi;
11847 unsigned HOST_WIDE_INT signed_max_lo;
11848 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11850 if (width <= HOST_BITS_PER_WIDE_INT)
11852 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11853 - 1;
11854 signed_max_hi = 0;
11855 max_hi = 0;
11857 if (TYPE_UNSIGNED (arg1_type))
11859 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11860 min_lo = 0;
11861 min_hi = 0;
11863 else
11865 max_lo = signed_max_lo;
11866 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11867 min_hi = -1;
11870 else
11872 width -= HOST_BITS_PER_WIDE_INT;
11873 signed_max_lo = -1;
11874 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11875 - 1;
11876 max_lo = -1;
11877 min_lo = 0;
11879 if (TYPE_UNSIGNED (arg1_type))
11881 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11882 min_hi = 0;
11884 else
11886 max_hi = signed_max_hi;
11887 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11891 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11892 && TREE_INT_CST_LOW (arg1) == max_lo)
11893 switch (code)
11895 case GT_EXPR:
11896 return omit_one_operand (type, integer_zero_node, arg0);
11898 case GE_EXPR:
11899 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11901 case LE_EXPR:
11902 return omit_one_operand (type, integer_one_node, arg0);
11904 case LT_EXPR:
11905 return fold_build2 (NE_EXPR, type, arg0, arg1);
11907 /* The GE_EXPR and LT_EXPR cases above are not normally
11908 reached because of previous transformations. */
11910 default:
11911 break;
11913 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11914 == max_hi
11915 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11916 switch (code)
11918 case GT_EXPR:
11919 arg1 = const_binop (PLUS_EXPR, arg1,
11920 build_int_cst (TREE_TYPE (arg1), 1), 0);
11921 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11922 case LE_EXPR:
11923 arg1 = const_binop (PLUS_EXPR, arg1,
11924 build_int_cst (TREE_TYPE (arg1), 1), 0);
11925 return fold_build2 (NE_EXPR, type, arg0, arg1);
11926 default:
11927 break;
11929 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11930 == min_hi
11931 && TREE_INT_CST_LOW (arg1) == min_lo)
11932 switch (code)
11934 case LT_EXPR:
11935 return omit_one_operand (type, integer_zero_node, arg0);
11937 case LE_EXPR:
11938 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11940 case GE_EXPR:
11941 return omit_one_operand (type, integer_one_node, arg0);
11943 case GT_EXPR:
11944 return fold_build2 (NE_EXPR, type, op0, op1);
11946 default:
11947 break;
11949 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11950 == min_hi
11951 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11952 switch (code)
11954 case GE_EXPR:
11955 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11956 return fold_build2 (NE_EXPR, type, arg0, arg1);
11957 case LT_EXPR:
11958 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11959 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11960 default:
11961 break;
11964 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11965 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11966 && TYPE_UNSIGNED (arg1_type)
11967 /* We will flip the signedness of the comparison operator
11968 associated with the mode of arg1, so the sign bit is
11969 specified by this mode. Check that arg1 is the signed
11970 max associated with this sign bit. */
11971 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11972 /* signed_type does not work on pointer types. */
11973 && INTEGRAL_TYPE_P (arg1_type))
11975 /* The following case also applies to X < signed_max+1
11976 and X >= signed_max+1 because of previous transformations. */
11977 if (code == LE_EXPR || code == GT_EXPR)
11979 tree st0, st1;
11980 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11981 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11982 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11983 type, fold_convert (st0, arg0),
11984 build_int_cst (st1, 0));
11990 /* If we are comparing an ABS_EXPR with a constant, we can
11991 convert all the cases into explicit comparisons, but they may
11992 well not be faster than doing the ABS and one comparison.
11993 But ABS (X) <= C is a range comparison, which becomes a subtraction
11994 and a comparison, and is probably faster. */
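/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5.  */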
11995 if (code == LE_EXPR
11996 && TREE_CODE (arg1) == INTEGER_CST
11997 && TREE_CODE (arg0) == ABS_EXPR
11998 && ! TREE_SIDE_EFFECTS (arg0)
11999 && (0 != (tem = negate_expr (arg1)))
12000 && TREE_CODE (tem) == INTEGER_CST
12001 && !TREE_OVERFLOW (tem))
12002 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12003 build2 (GE_EXPR, type,
12004 TREE_OPERAND (arg0, 0), tem),
12005 build2 (LE_EXPR, type,
12006 TREE_OPERAND (arg0, 0), arg1));
12008 /* Convert ABS_EXPR<x> >= 0 to true. */
12009 strict_overflow_p = false;
12010 if (code == GE_EXPR
12011 && (integer_zerop (arg1)
12012 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12013 && real_zerop (arg1)))
12014 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12016 if (strict_overflow_p)
12017 fold_overflow_warning (("assuming signed overflow does not occur "
12018 "when simplifying comparison of "
12019 "absolute value and zero"),
12020 WARN_STRICT_OVERFLOW_CONDITIONAL);
12021 return omit_one_operand (type, integer_one_node, arg0);
12024 /* Convert ABS_EXPR<x> < 0 to false. */
12025 strict_overflow_p = false;
12026 if (code == LT_EXPR
12027 && (integer_zerop (arg1) || real_zerop (arg1))
12028 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12030 if (strict_overflow_p)
12031 fold_overflow_warning (("assuming signed overflow does not occur "
12032 "when simplifying comparison of "
12033 "absolute value and zero"),
12034 WARN_STRICT_OVERFLOW_CONDITIONAL);
12035 return omit_one_operand (type, integer_zero_node, arg0);
12038 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12039 and similarly for >= into !=. */
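/* E.g. for unsigned X, X < (1 << Y) becomes (X >> Y) == 0 and
   X >= (1 << Y) becomes (X >> Y) != 0.  */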
12040 if ((code == LT_EXPR || code == GE_EXPR)
12041 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12042 && TREE_CODE (arg1) == LSHIFT_EXPR
12043 && integer_onep (TREE_OPERAND (arg1, 0)))
12044 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12045 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12046 TREE_OPERAND (arg1, 1)),
12047 build_int_cst (TREE_TYPE (arg0), 0));
12049 if ((code == LT_EXPR || code == GE_EXPR)
12050 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12051 && (TREE_CODE (arg1) == NOP_EXPR
12052 || TREE_CODE (arg1) == CONVERT_EXPR)
12053 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12054 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12055 return
12056 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12057 fold_convert (TREE_TYPE (arg0),
12058 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12059 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12060 1))),
12061 build_int_cst (TREE_TYPE (arg0), 0));
12063 return NULL_TREE;
12065 case UNORDERED_EXPR:
12066 case ORDERED_EXPR:
12067 case UNLT_EXPR:
12068 case UNLE_EXPR:
12069 case UNGT_EXPR:
12070 case UNGE_EXPR:
12071 case UNEQ_EXPR:
12072 case LTGT_EXPR:
12073 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12075 t1 = fold_relational_const (code, type, arg0, arg1);
12076 if (t1 != NULL_TREE)
12077 return t1;
12080 /* If the first operand is NaN, the result is constant. */
12081 if (TREE_CODE (arg0) == REAL_CST
12082 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12083 && (code != LTGT_EXPR || ! flag_trapping_math))
12085 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12086 ? integer_zero_node
12087 : integer_one_node;
12088 return omit_one_operand (type, t1, arg1);
12091 /* If the second operand is NaN, the result is constant. */
12092 if (TREE_CODE (arg1) == REAL_CST
12093 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12094 && (code != LTGT_EXPR || ! flag_trapping_math))
12096 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12097 ? integer_zero_node
12098 : integer_one_node;
12099 return omit_one_operand (type, t1, arg0);
12102 /* Simplify unordered comparison of something with itself. */
12103 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12104 && operand_equal_p (arg0, arg1, 0))
12105 return constant_boolean_node (1, type);
12107 if (code == LTGT_EXPR
12108 && !flag_trapping_math
12109 && operand_equal_p (arg0, arg1, 0))
12110 return constant_boolean_node (0, type);
12112 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
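/* Widening a float is exact and order-preserving, so e.g.
   (double) F1 < (double) F2 is equivalent to F1 < F2 when F1 and
   F2 are floats.  */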
12114 tree targ0 = strip_float_extensions (arg0);
12115 tree targ1 = strip_float_extensions (arg1);
12116 tree newtype = TREE_TYPE (targ0);
12118 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12119 newtype = TREE_TYPE (targ1);
12121 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12122 return fold_build2 (code, type, fold_convert (newtype, targ0),
12123 fold_convert (newtype, targ1));
12126 return NULL_TREE;
12128 case COMPOUND_EXPR:
12129 /* When pedantic, a compound expression can be neither an lvalue
12130 nor an integer constant expression. */
12131 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12132 return NULL_TREE;
12133 /* Don't let (0, 0) be a null pointer constant. */
12134 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12135 : fold_convert (type, arg1);
12136 return pedantic_non_lvalue (tem);
12138 case COMPLEX_EXPR:
12139 if ((TREE_CODE (arg0) == REAL_CST
12140 && TREE_CODE (arg1) == REAL_CST)
12141 || (TREE_CODE (arg0) == INTEGER_CST
12142 && TREE_CODE (arg1) == INTEGER_CST))
12143 return build_complex (type, arg0, arg1);
12144 return NULL_TREE;
12146 case ASSERT_EXPR:
12147 /* An ASSERT_EXPR should never be passed to fold_binary. */
12148 gcc_unreachable ();
12150 default:
12151 return NULL_TREE;
12152 } /* switch (code) */
12155 /* Callback for walk_tree, looking for LABEL_EXPR.
12156 Returns *TP if it is a LABEL_EXPR, NULL_TREE otherwise.
12157 Do not check the sub-tree of GOTO_EXPR. */
12159 static tree
12160 contains_label_1 (tree *tp,
12161 int *walk_subtrees,
12162 void *data ATTRIBUTE_UNUSED)
12164 switch (TREE_CODE (*tp))
12166 case LABEL_EXPR:
12167 return *tp;
12168 case GOTO_EXPR:
12169 *walk_subtrees = 0;
12170 /* no break */
12171 default:
12172 return NULL_TREE;
12176 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12177 accessible from outside the sub-tree. Returns true if such a label
12178 is found, false otherwise. */
12180 static bool
12181 contains_label_p (tree st)
12183 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12186 /* Fold a ternary expression of code CODE and type TYPE with operands
12187 OP0, OP1, and OP2. Return the folded expression if folding is
12188 successful. Otherwise, return NULL_TREE. */
12190 tree
12191 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12193 tree tem;
12194 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12195 enum tree_code_class kind = TREE_CODE_CLASS (code);
12197 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12198 && TREE_CODE_LENGTH (code) == 3);
12200 /* Strip any conversions that don't change the mode. This is safe
12201 for every expression, except for a comparison expression because
12202 its signedness is derived from its operands. So, in the latter
12203 case, only strip conversions that don't change the signedness.
12205 Note that this is done as an internal manipulation within the
12206 constant folder, in order to find the simplest representation of
12207 the arguments so that their form can be studied. In any case,
12208 the appropriate type conversions should be put back in the tree
12209 that will get out of the constant folder. */
12210 if (op0)
12212 arg0 = op0;
12213 STRIP_NOPS (arg0);
12216 if (op1)
12218 arg1 = op1;
12219 STRIP_NOPS (arg1);
12222 switch (code)
12224 case COMPONENT_REF:
12225 if (TREE_CODE (arg0) == CONSTRUCTOR
12226 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12228 unsigned HOST_WIDE_INT idx;
12229 tree field, value;
12230 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12231 if (field == arg1)
12232 return value;
12234 return NULL_TREE;
12236 case COND_EXPR:
12237 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12238 so all simple results must be passed through pedantic_non_lvalue. */
12239 if (TREE_CODE (arg0) == INTEGER_CST)
12241 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12242 tem = integer_zerop (arg0) ? op2 : op1;
12243 /* Only optimize constant conditions when the selected branch
12244 has the same type as the COND_EXPR. This avoids optimizing
12245 away "c ? x : throw", where the throw has a void type.
12246 Avoid throwing away the operand if it contains a label. */
12247 if ((!TREE_SIDE_EFFECTS (unused_op)
12248 || !contains_label_p (unused_op))
12249 && (! VOID_TYPE_P (TREE_TYPE (tem))
12250 || VOID_TYPE_P (type)))
12251 return pedantic_non_lvalue (tem);
12252 return NULL_TREE;
12254 if (operand_equal_p (arg1, op2, 0))
12255 return pedantic_omit_one_operand (type, arg1, arg0);
12257 /* If we have A op B ? A : C, we may be able to convert this to a
12258 simpler expression, depending on the operation and the values
12259 of B and C. Signed zeros prevent all of these transformations,
12260 for reasons given above each one.
12262 Also try swapping the arguments and inverting the conditional. */
12263 if (COMPARISON_CLASS_P (arg0)
12264 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12265 arg1, TREE_OPERAND (arg0, 1))
12266 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12268 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12269 if (tem)
12270 return tem;
12273 if (COMPARISON_CLASS_P (arg0)
12274 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12275 op2,
12276 TREE_OPERAND (arg0, 1))
12277 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12279 tem = fold_truth_not_expr (arg0);
12280 if (tem && COMPARISON_CLASS_P (tem))
12282 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12283 if (tem)
12284 return tem;
12288 /* If the second operand is simpler than the third, swap them
12289 since that produces better jump optimization results. */
12290 if (truth_value_p (TREE_CODE (arg0))
12291 && tree_swap_operands_p (op1, op2, false))
12293 /* See if this can be inverted. If it can't, possibly because
12294 it was a floating-point inequality comparison, don't do
12295 anything. */
12296 tem = fold_truth_not_expr (arg0);
12297 if (tem)
12298 return fold_build3 (code, type, tem, op2, op1);
12301 /* Convert A ? 1 : 0 to simply A. */
12302 if (integer_onep (op1)
12303 && integer_zerop (op2)
12304 /* If we try to convert OP0 to our type, the
12305 call to fold will try to move the conversion inside
12306 a COND, which will recurse. In that case, the COND_EXPR
12307 is probably the best choice, so leave it alone. */
12308 && type == TREE_TYPE (arg0))
12309 return pedantic_non_lvalue (arg0);
12311 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12312 over COND_EXPR in cases such as floating point comparisons. */
12313 if (integer_zerop (op1)
12314 && integer_onep (op2)
12315 && truth_value_p (TREE_CODE (arg0)))
12316 return pedantic_non_lvalue (fold_convert (type,
12317 invert_truthvalue (arg0)));
12319 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
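/* E.g. for 32-bit int A, A < 0 ? INT_MIN : 0 becomes A & INT_MIN,
   i.e. A & 0x80000000.  */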
12320 if (TREE_CODE (arg0) == LT_EXPR
12321 && integer_zerop (TREE_OPERAND (arg0, 1))
12322 && integer_zerop (op2)
12323 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12325 /* sign_bit_p only checks ARG1 bits within A's precision.
12326 If <sign bit of A> has wider type than A, bits outside
12327 of A's precision in <sign bit of A> need to be checked.
12328 If they are all 0, this optimization needs to be done
12329 in unsigned A's type; if they are all 1, in signed A's type;
12330 otherwise this can't be done. */
12331 if (TYPE_PRECISION (TREE_TYPE (tem))
12332 < TYPE_PRECISION (TREE_TYPE (arg1))
12333 && TYPE_PRECISION (TREE_TYPE (tem))
12334 < TYPE_PRECISION (type))
12336 unsigned HOST_WIDE_INT mask_lo;
12337 HOST_WIDE_INT mask_hi;
12338 int inner_width, outer_width;
12339 tree tem_type;
12341 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12342 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12343 if (outer_width > TYPE_PRECISION (type))
12344 outer_width = TYPE_PRECISION (type);
12346 if (outer_width > HOST_BITS_PER_WIDE_INT)
12348 mask_hi = ((unsigned HOST_WIDE_INT) -1
12349 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12350 mask_lo = -1;
12352 else
12354 mask_hi = 0;
12355 mask_lo = ((unsigned HOST_WIDE_INT) -1
12356 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12358 if (inner_width > HOST_BITS_PER_WIDE_INT)
12360 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12361 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12362 mask_lo = 0;
12364 else
12365 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12366 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12368 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12369 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12371 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12372 tem = fold_convert (tem_type, tem);
12374 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12375 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12377 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12378 tem = fold_convert (tem_type, tem);
12380 else
12381 tem = NULL;
12384 if (tem)
12385 return fold_convert (type,
12386 fold_build2 (BIT_AND_EXPR,
12387 TREE_TYPE (tem), tem,
12388 fold_convert (TREE_TYPE (tem),
12389 arg1)));
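/* A worked instance of the widening check above (illustration only):
   if A is a signed char, its sign bit is 0x80.  When ARG1 is the
   32-bit constant 0x00000080, the bits above A's 8-bit precision are
   all zero, so the BIT_AND_EXPR is performed in unsigned char; when
   ARG1 is 0xffffff80 (-128) those bits are all one, so it is
   performed in signed char; for any other bit pattern TEM stays NULL
   and the transformation is skipped.  */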
12392 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12393 already handled above. */
12394 if (TREE_CODE (arg0) == BIT_AND_EXPR
12395 && integer_onep (TREE_OPERAND (arg0, 1))
12396 && integer_zerop (op2)
12397 && integer_pow2p (arg1))
12399 tree tem = TREE_OPERAND (arg0, 0);
12400 STRIP_NOPS (tem);
12401 if (TREE_CODE (tem) == RSHIFT_EXPR
12402 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12403 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12404 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12405 return fold_build2 (BIT_AND_EXPR, type,
12406 TREE_OPERAND (tem, 0), arg1);
12409 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12410 is probably obsolete because the first operand should be a
12411 truth value (that's why we have the two cases above), but let's
12412 leave it in until we can confirm this for all front-ends. */
12413 if (integer_zerop (op2)
12414 && TREE_CODE (arg0) == NE_EXPR
12415 && integer_zerop (TREE_OPERAND (arg0, 1))
12416 && integer_pow2p (arg1)
12417 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12418 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12419 arg1, OEP_ONLY_CONST))
12420 return pedantic_non_lvalue (fold_convert (type,
12421 TREE_OPERAND (arg0, 0)));
12423 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12424 if (integer_zerop (op2)
12425 && truth_value_p (TREE_CODE (arg0))
12426 && truth_value_p (TREE_CODE (arg1)))
12427 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12428 fold_convert (type, arg0),
12429 arg1);
12431 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12432 if (integer_onep (op2)
12433 && truth_value_p (TREE_CODE (arg0))
12434 && truth_value_p (TREE_CODE (arg1)))
12436 /* Only perform transformation if ARG0 is easily inverted. */
12437 tem = fold_truth_not_expr (arg0);
12438 if (tem)
12439 return fold_build2 (TRUTH_ORIF_EXPR, type,
12440 fold_convert (type, tem),
12441 arg1);
12444 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12445 if (integer_zerop (arg1)
12446 && truth_value_p (TREE_CODE (arg0))
12447 && truth_value_p (TREE_CODE (op2)))
12449 /* Only perform transformation if ARG0 is easily inverted. */
12450 tem = fold_truth_not_expr (arg0);
12451 if (tem)
12452 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12453 fold_convert (type, tem),
12454 op2);
12457 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12458 if (integer_onep (arg1)
12459 && truth_value_p (TREE_CODE (arg0))
12460 && truth_value_p (TREE_CODE (op2)))
12461 return fold_build2 (TRUTH_ORIF_EXPR, type,
12462 fold_convert (type, arg0),
12463 op2);
12465 return NULL_TREE;
12467 case CALL_EXPR:
12468 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12469 of fold_ternary on them. */
12470 gcc_unreachable ();
12472 case BIT_FIELD_REF:
12473 if (TREE_CODE (arg0) == VECTOR_CST
12474 && type == TREE_TYPE (TREE_TYPE (arg0))
12475 && host_integerp (arg1, 1)
12476 && host_integerp (op2, 1))
12478 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12479 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12481 if (width != 0
12482 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12483 && (idx % width) == 0
12484 && (idx = idx / width)
12485 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12487 tree elements = TREE_VECTOR_CST_ELTS (arg0);
12488 while (idx-- > 0 && elements)
12489 elements = TREE_CHAIN (elements);
12490 if (elements)
12491 return TREE_VALUE (elements);
12492 else
12493 return fold_convert (type, integer_zero_node);
12496 return NULL_TREE;
12498 default:
12499 return NULL_TREE;
12500 } /* switch (code) */
12503 /* Perform constant folding and related simplification of EXPR.
12504 The related simplifications include x*1 => x, x*0 => 0, etc.,
12505 and application of the associative law.
12506 NOP_EXPR conversions may be removed freely (as long as we
12507 are careful not to change the type of the overall expression).
12508 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12509 but we can constant-fold them if they have constant operands. */
12511 #ifdef ENABLE_FOLD_CHECKING
12512 # define fold(x) fold_1 (x)
12513 static tree fold_1 (tree);
12514 static
12515 #endif
12516 tree
12517 fold (tree expr)
12519 const tree t = expr;
12520 enum tree_code code = TREE_CODE (t);
12521 enum tree_code_class kind = TREE_CODE_CLASS (code);
12522 tree tem;
12524 /* Return right away if a constant. */
12525 if (kind == tcc_constant)
12526 return t;
12528 /* CALL_EXPR-like objects with variable numbers of operands are
12529 treated specially. */
12530 if (kind == tcc_vl_exp)
12532 if (code == CALL_EXPR)
12534 tem = fold_call_expr (expr, false);
12535 return tem ? tem : expr;
12537 return expr;
12540 if (IS_EXPR_CODE_CLASS (kind)
12541 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12543 tree type = TREE_TYPE (t);
12544 tree op0, op1, op2;
12546 switch (TREE_CODE_LENGTH (code))
12548 case 1:
12549 op0 = TREE_OPERAND (t, 0);
12550 tem = fold_unary (code, type, op0);
12551 return tem ? tem : expr;
12552 case 2:
12553 op0 = TREE_OPERAND (t, 0);
12554 op1 = TREE_OPERAND (t, 1);
12555 tem = fold_binary (code, type, op0, op1);
12556 return tem ? tem : expr;
12557 case 3:
12558 op0 = TREE_OPERAND (t, 0);
12559 op1 = TREE_OPERAND (t, 1);
12560 op2 = TREE_OPERAND (t, 2);
12561 tem = fold_ternary (code, type, op0, op1, op2);
12562 return tem ? tem : expr;
12563 default:
12564 break;
12568 switch (code)
12570 case CONST_DECL:
12571 return fold (DECL_INITIAL (t));
12573 default:
12574 return t;
12575 } /* switch (code) */
12578 #ifdef ENABLE_FOLD_CHECKING
12579 #undef fold
12581 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12582 static void fold_check_failed (tree, tree);
12583 void print_fold_checksum (tree);
12585 /* When --enable-checking=fold, compute a digest of expr before
12586 and after the actual fold call to verify that fold did not
12587 accidentally change the original expr. */
12589 tree
12590 fold (tree expr)
12592 tree ret;
12593 struct md5_ctx ctx;
12594 unsigned char checksum_before[16], checksum_after[16];
12595 htab_t ht;
12597 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12598 md5_init_ctx (&ctx);
12599 fold_checksum_tree (expr, &ctx, ht);
12600 md5_finish_ctx (&ctx, checksum_before);
12601 htab_empty (ht);
12603 ret = fold_1 (expr);
12605 md5_init_ctx (&ctx);
12606 fold_checksum_tree (expr, &ctx, ht);
12607 md5_finish_ctx (&ctx, checksum_after);
12608 htab_delete (ht);
12610 if (memcmp (checksum_before, checksum_after, 16))
12611 fold_check_failed (expr, ret);
12613 return ret;
12616 void
12617 print_fold_checksum (tree expr)
12619 struct md5_ctx ctx;
12620 unsigned char checksum[16], cnt;
12621 htab_t ht;
12623 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12624 md5_init_ctx (&ctx);
12625 fold_checksum_tree (expr, &ctx, ht);
12626 md5_finish_ctx (&ctx, checksum);
12627 htab_delete (ht);
12628 for (cnt = 0; cnt < 16; ++cnt)
12629 fprintf (stderr, "%02x", checksum[cnt]);
12630 putc ('\n', stderr);
12633 static void
12634 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12636 internal_error ("fold check: original tree changed by fold");
12639 static void
12640 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12642 void **slot;
12643 enum tree_code code;
12644 struct tree_function_decl buf;
12645 int i, len;
12647 recursive_label:
12649 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12650 <= sizeof (struct tree_function_decl))
12651 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12652 if (expr == NULL)
12653 return;
12654 slot = htab_find_slot (ht, expr, INSERT);
12655 if (*slot != NULL)
12656 return;
12657 *slot = expr;
12658 code = TREE_CODE (expr);
12659 if (TREE_CODE_CLASS (code) == tcc_declaration
12660 && DECL_ASSEMBLER_NAME_SET_P (expr))
12662 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12663 memcpy ((char *) &buf, expr, tree_size (expr));
12664 expr = (tree) &buf;
12665 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12667 else if (TREE_CODE_CLASS (code) == tcc_type
12668 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12669 || TYPE_CACHED_VALUES_P (expr)
12670 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12672 /* Allow these fields to be modified. */
12673 memcpy ((char *) &buf, expr, tree_size (expr));
12674 expr = (tree) &buf;
12675 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12676 TYPE_POINTER_TO (expr) = NULL;
12677 TYPE_REFERENCE_TO (expr) = NULL;
12678 if (TYPE_CACHED_VALUES_P (expr))
12680 TYPE_CACHED_VALUES_P (expr) = 0;
12681 TYPE_CACHED_VALUES (expr) = NULL;
12684 md5_process_bytes (expr, tree_size (expr), ctx);
12685 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12686 if (TREE_CODE_CLASS (code) != tcc_type
12687 && TREE_CODE_CLASS (code) != tcc_declaration
12688 && code != TREE_LIST)
12689 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12690 switch (TREE_CODE_CLASS (code))
12692 case tcc_constant:
12693 switch (code)
12695 case STRING_CST:
12696 md5_process_bytes (TREE_STRING_POINTER (expr),
12697 TREE_STRING_LENGTH (expr), ctx);
12698 break;
12699 case COMPLEX_CST:
12700 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12701 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12702 break;
12703 case VECTOR_CST:
12704 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12705 break;
12706 default:
12707 break;
12709 break;
12710 case tcc_exceptional:
12711 switch (code)
12713 case TREE_LIST:
12714 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12715 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12716 expr = TREE_CHAIN (expr);
12717 goto recursive_label;
12718 break;
12719 case TREE_VEC:
12720 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12721 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12722 break;
12723 default:
12724 break;
12726 break;
12727 case tcc_expression:
12728 case tcc_reference:
12729 case tcc_comparison:
12730 case tcc_unary:
12731 case tcc_binary:
12732 case tcc_statement:
12733 case tcc_vl_exp:
12734 len = TREE_OPERAND_LENGTH (expr);
12735 for (i = 0; i < len; ++i)
12736 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12737 break;
12738 case tcc_declaration:
12739 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12740 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12741 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12743 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12744 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12745 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12746 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12747 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12749 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12750 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12752 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12754 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12755 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12756 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12758 break;
12759 case tcc_type:
12760 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12761 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12762 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12763 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12764 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12765 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12766 if (INTEGRAL_TYPE_P (expr)
12767 || SCALAR_FLOAT_TYPE_P (expr))
12769 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12770 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12772 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12773 if (TREE_CODE (expr) == RECORD_TYPE
12774 || TREE_CODE (expr) == UNION_TYPE
12775 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12776 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12777 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12778 break;
12779 default:
12780 break;
12784 #endif
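/* Illustrative sketch only (a hypothetical helper, not used anywhere
   in GCC): typical entry-point usage of fold.  Relies solely on APIs
   this file already includes.  */
static tree ATTRIBUTE_UNUSED
fold_usage_example (void)
{
  /* Build 2 + 3 without folding, then let fold collapse it to the
     INTEGER_CST 5; fold returns its argument unchanged when no
     simplification applies.  */
  tree sum = build2 (PLUS_EXPR, integer_type_node,
		     build_int_cst (integer_type_node, 2),
		     build_int_cst (integer_type_node, 3));
  return fold (sum);
}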
12786 /* Fold a unary tree expression with code CODE of type TYPE with an
12787 operand OP0. Return a folded expression if successful. Otherwise,
12788 return a tree expression with code CODE of type TYPE with an
12789 operand OP0. */
12791 tree
12792 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12794 tree tem;
12795 #ifdef ENABLE_FOLD_CHECKING
12796 unsigned char checksum_before[16], checksum_after[16];
12797 struct md5_ctx ctx;
12798 htab_t ht;
12800 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12801 md5_init_ctx (&ctx);
12802 fold_checksum_tree (op0, &ctx, ht);
12803 md5_finish_ctx (&ctx, checksum_before);
12804 htab_empty (ht);
12805 #endif
12807 tem = fold_unary (code, type, op0);
12808 if (!tem)
12809 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12811 #ifdef ENABLE_FOLD_CHECKING
12812 md5_init_ctx (&ctx);
12813 fold_checksum_tree (op0, &ctx, ht);
12814 md5_finish_ctx (&ctx, checksum_after);
12815 htab_delete (ht);
12817 if (memcmp (checksum_before, checksum_after, 16))
12818 fold_check_failed (op0, tem);
12819 #endif
12820 return tem;
12823 /* Fold a binary tree expression with code CODE of type TYPE with
12824 operands OP0 and OP1. Return a folded expression if successful.
12825 Otherwise, return a tree expression with code CODE of type TYPE
12826 with operands OP0 and OP1. */
12828 tree
12829 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12830 MEM_STAT_DECL)
12832 tree tem;
12833 #ifdef ENABLE_FOLD_CHECKING
12834 unsigned char checksum_before_op0[16],
12835 checksum_before_op1[16],
12836 checksum_after_op0[16],
12837 checksum_after_op1[16];
12838 struct md5_ctx ctx;
12839 htab_t ht;
12841 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12842 md5_init_ctx (&ctx);
12843 fold_checksum_tree (op0, &ctx, ht);
12844 md5_finish_ctx (&ctx, checksum_before_op0);
12845 htab_empty (ht);
12847 md5_init_ctx (&ctx);
12848 fold_checksum_tree (op1, &ctx, ht);
12849 md5_finish_ctx (&ctx, checksum_before_op1);
12850 htab_empty (ht);
12851 #endif
12853 tem = fold_binary (code, type, op0, op1);
12854 if (!tem)
12855 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12857 #ifdef ENABLE_FOLD_CHECKING
12858 md5_init_ctx (&ctx);
12859 fold_checksum_tree (op0, &ctx, ht);
12860 md5_finish_ctx (&ctx, checksum_after_op0);
12861 htab_empty (ht);
12863 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12864 fold_check_failed (op0, tem);
12866 md5_init_ctx (&ctx);
12867 fold_checksum_tree (op1, &ctx, ht);
12868 md5_finish_ctx (&ctx, checksum_after_op1);
12869 htab_delete (ht);
12871 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12872 fold_check_failed (op1, tem);
12873 #endif
12874 return tem;
12877 /* Fold a ternary tree expression with code CODE of type TYPE with
12878 operands OP0, OP1, and OP2. Return a folded expression if
12879 successful. Otherwise, return a tree expression with code CODE of
12880 type TYPE with operands OP0, OP1, and OP2. */
12882 tree
12883 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12884 MEM_STAT_DECL)
12886 tree tem;
12887 #ifdef ENABLE_FOLD_CHECKING
12888 unsigned char checksum_before_op0[16],
12889 checksum_before_op1[16],
12890 checksum_before_op2[16],
12891 checksum_after_op0[16],
12892 checksum_after_op1[16],
12893 checksum_after_op2[16];
12894 struct md5_ctx ctx;
12895 htab_t ht;
12897 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12898 md5_init_ctx (&ctx);
12899 fold_checksum_tree (op0, &ctx, ht);
12900 md5_finish_ctx (&ctx, checksum_before_op0);
12901 htab_empty (ht);
12903 md5_init_ctx (&ctx);
12904 fold_checksum_tree (op1, &ctx, ht);
12905 md5_finish_ctx (&ctx, checksum_before_op1);
12906 htab_empty (ht);
12908 md5_init_ctx (&ctx);
12909 fold_checksum_tree (op2, &ctx, ht);
12910 md5_finish_ctx (&ctx, checksum_before_op2);
12911 htab_empty (ht);
12912 #endif
12914 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12915 tem = fold_ternary (code, type, op0, op1, op2);
12916 if (!tem)
12917 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12919 #ifdef ENABLE_FOLD_CHECKING
12920 md5_init_ctx (&ctx);
12921 fold_checksum_tree (op0, &ctx, ht);
12922 md5_finish_ctx (&ctx, checksum_after_op0);
12923 htab_empty (ht);
12925 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12926 fold_check_failed (op0, tem);
12928 md5_init_ctx (&ctx);
12929 fold_checksum_tree (op1, &ctx, ht);
12930 md5_finish_ctx (&ctx, checksum_after_op1);
12931 htab_empty (ht);
12933 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12934 fold_check_failed (op1, tem);
12936 md5_init_ctx (&ctx);
12937 fold_checksum_tree (op2, &ctx, ht);
12938 md5_finish_ctx (&ctx, checksum_after_op2);
12939 htab_delete (ht);
12941 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12942 fold_check_failed (op2, tem);
12943 #endif
12944 return tem;
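/* For illustration: fold_build3 (COND_EXPR, integer_type_node,
   integer_one_node, a, b) folds straight to A, since fold_ternary
   resolves a constant condition; with a non-constant condition the
   call falls through to build3_stat and returns a fresh COND_EXPR.  */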
12947 /* Fold a CALL_EXPR expression of type TYPE with function FN and NARGS
12948 arguments in ARGARRAY, and a null static chain.
12949 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12950 of type TYPE from the given operands as constructed by build_call_array. */
12952 tree
12953 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
12955 tree tem;
12956 #ifdef ENABLE_FOLD_CHECKING
12957 unsigned char checksum_before_fn[16],
12958 checksum_before_arglist[16],
12959 checksum_after_fn[16],
12960 checksum_after_arglist[16];
12961 struct md5_ctx ctx;
12962 htab_t ht;
12963 int i;
12965 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12966 md5_init_ctx (&ctx);
12967 fold_checksum_tree (fn, &ctx, ht);
12968 md5_finish_ctx (&ctx, checksum_before_fn);
12969 htab_empty (ht);
12971 md5_init_ctx (&ctx);
12972 for (i = 0; i < nargs; i++)
12973 fold_checksum_tree (argarray[i], &ctx, ht);
12974 md5_finish_ctx (&ctx, checksum_before_arglist);
12975 htab_empty (ht);
12976 #endif
12978 tem = fold_builtin_call_array (type, fn, nargs, argarray);
12980 #ifdef ENABLE_FOLD_CHECKING
12981 md5_init_ctx (&ctx);
12982 fold_checksum_tree (fn, &ctx, ht);
12983 md5_finish_ctx (&ctx, checksum_after_fn);
12984 htab_empty (ht);
12986 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12987 fold_check_failed (fn, tem);
12989 md5_init_ctx (&ctx);
12990 for (i = 0; i < nargs; i++)
12991 fold_checksum_tree (argarray[i], &ctx, ht);
12992 md5_finish_ctx (&ctx, checksum_after_arglist);
12993 htab_delete (ht);
12995 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12996 fold_check_failed (NULL_TREE, tem);
12997 #endif
12998 return tem;
13001 /* Perform constant folding and related simplification of initializer
13002 expression EXPR. These behave identically to "fold_buildN" but ignore
13003 potential run-time traps and exceptions that fold must preserve. */
13005 #define START_FOLD_INIT \
13006 int saved_signaling_nans = flag_signaling_nans;\
13007 int saved_trapping_math = flag_trapping_math;\
13008 int saved_rounding_math = flag_rounding_math;\
13009 int saved_trapv = flag_trapv;\
13010 int saved_folding_initializer = folding_initializer;\
13011 flag_signaling_nans = 0;\
13012 flag_trapping_math = 0;\
13013 flag_rounding_math = 0;\
13014 flag_trapv = 0;\
13015 folding_initializer = 1;
13017 #define END_FOLD_INIT \
13018 flag_signaling_nans = saved_signaling_nans;\
13019 flag_trapping_math = saved_trapping_math;\
13020 flag_rounding_math = saved_rounding_math;\
13021 flag_trapv = saved_trapv;\
13022 folding_initializer = saved_folding_initializer;
13024 tree
13025 fold_build1_initializer (enum tree_code code, tree type, tree op)
13027 tree result;
13028 START_FOLD_INIT;
13030 result = fold_build1 (code, type, op);
13032 END_FOLD_INIT;
13033 return result;
13036 tree
13037 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13039 tree result;
13040 START_FOLD_INIT;
13042 result = fold_build2 (code, type, op0, op1);
13044 END_FOLD_INIT;
13045 return result;
13048 tree
13049 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13050 tree op2)
13052 tree result;
13053 START_FOLD_INIT;
13055 result = fold_build3 (code, type, op0, op1, op2);
13057 END_FOLD_INIT;
13058 return result;
13061 tree
13062 fold_build_call_array_initializer (tree type, tree fn,
13063 int nargs, tree *argarray)
13065 tree result;
13066 START_FOLD_INIT;
13068 result = fold_build_call_array (type, fn, nargs, argarray);
13070 END_FOLD_INIT;
13071 return result;
13074 #undef START_FOLD_INIT
13075 #undef END_FOLD_INIT
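/* Illustrative sketch only (a hypothetical helper): the _initializer
   variants evaluate exactly like the corresponding fold_buildN, but
   with the trap-sensitive flags cleared for the duration of the call,
   because a static initializer is computed at translation time and
   can never trap at run time.  */
static tree ATTRIBUTE_UNUSED
fold_initializer_usage_example (tree op0, tree op1)
{
  /* OP0 and OP1 are assumed to be integer constants here.  Behaves as
     if -fno-signaling-nans, -fno-trapping-math, -fno-rounding-math
     and -fno-trapv were all in effect.  */
  return fold_build2_initializer (PLUS_EXPR, integer_type_node, op0, op1);
}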
13077 /* Determine if the first argument is a multiple of the second argument.
13078 Return 0 if it is not, or if we cannot easily determine it to be.
13080 An example of the sort of thing we care about (at this point; this routine
13081 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13082 fold cases do now) is discovering that
13084 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13086 is a multiple of
13088 SAVE_EXPR (J * 8)
13090 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13092 This code also handles discovering that
13094 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13096 is a multiple of 8 so we don't have to worry about dealing with a
13097 possible remainder.
13099 Note that we *look* inside a SAVE_EXPR only to determine how it was
13100 calculated; it is not safe for fold to do much of anything else with the
13101 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13102 at run time. For example, the latter example above *cannot* be implemented
13103 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13104 evaluation time of the original SAVE_EXPR is not necessarily the same at
13105 the time the new expression is evaluated. The only optimization of this
13106 sort that would be valid is changing
13108 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13110 divided by 8 to
13112 SAVE_EXPR (I) * SAVE_EXPR (J)
13114 (where the same SAVE_EXPR (J) is used in the original and the
13115 transformed version). */
13117 static int
13118 multiple_of_p (tree type, tree top, tree bottom)
13120 if (operand_equal_p (top, bottom, 0))
13121 return 1;
13123 if (TREE_CODE (type) != INTEGER_TYPE)
13124 return 0;
13126 switch (TREE_CODE (top))
13128 case BIT_AND_EXPR:
13129 /* Bitwise and provides a power of two multiple. If the mask is
13130 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13131 if (!integer_pow2p (bottom))
13132 return 0;
13133 /* FALLTHRU */
13135 case MULT_EXPR:
13136 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13137 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13139 case PLUS_EXPR:
13140 case MINUS_EXPR:
13141 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13142 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13144 case LSHIFT_EXPR:
13145 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13147 tree op1, t1;
13149 op1 = TREE_OPERAND (top, 1);
13150 /* const_binop may not detect overflow correctly,
13151 so check for it explicitly here. */
13152 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13153 > TREE_INT_CST_LOW (op1)
13154 && TREE_INT_CST_HIGH (op1) == 0
13155 && 0 != (t1 = fold_convert (type,
13156 const_binop (LSHIFT_EXPR,
13157 size_one_node,
13158 op1, 0)))
13159 && !TREE_OVERFLOW (t1))
13160 return multiple_of_p (type, t1, bottom);
13162 return 0;
13164 case NOP_EXPR:
13165 /* Can't handle conversions from non-integral or wider integral type. */
13166 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13167 || (TYPE_PRECISION (type)
13168 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13169 return 0;
13171 /* ... fall through ... */
13173 case SAVE_EXPR:
13174 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13176 case INTEGER_CST:
13177 if (TREE_CODE (bottom) != INTEGER_CST
13178 || (TYPE_UNSIGNED (type)
13179 && (tree_int_cst_sgn (top) < 0
13180 || tree_int_cst_sgn (bottom) < 0)))
13181 return 0;
13182 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13183 top, bottom, 0));
13185 default:
13186 return 0;
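/* For illustration: multiple_of_p (sizetype, SAVE_EXPR (i) * (j * 8),
   size_int (8)) returns 1 through the MULT_EXPR and INTEGER_CST cases
   above, since 8 % 8 == 0; likewise x << 4 is a multiple of 8 because
   the LSHIFT_EXPR case reduces it to asking whether 1 << 4 == 16 is.  */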
13190 /* Return true if `t' is known to be non-negative. If the return
13191 value is based on the assumption that signed overflow is undefined,
13192 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13193 *STRICT_OVERFLOW_P. */
13195 bool
13196 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13198 if (t == error_mark_node)
13199 return false;
13201 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13202 return true;
13204 switch (TREE_CODE (t))
13206 case SSA_NAME:
13207 /* Query VRP to see if it has recorded any information about
13208 the range of this object. */
13209 return ssa_name_nonnegative_p (t);
13211 case ABS_EXPR:
13212 /* We can't return 1 if flag_wrapv is set because
13213 ABS_EXPR<INT_MIN> = INT_MIN. */
13214 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13215 return true;
13216 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13218 *strict_overflow_p = true;
13219 return true;
13221 break;
13223 case INTEGER_CST:
13224 return tree_int_cst_sgn (t) >= 0;
13226 case REAL_CST:
13227 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13229 case PLUS_EXPR:
13230 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13231 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13232 strict_overflow_p)
13233 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13234 strict_overflow_p));
13236 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13237 both unsigned and at least 2 bits shorter than the result. */
13238 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13239 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13240 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13242 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13243 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13244 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13245 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13247 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13248 TYPE_PRECISION (inner2)) + 1;
13249 return prec < TYPE_PRECISION (TREE_TYPE (t));
13252 break;
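/* Concrete instance of the check above (illustration only): for two
   unsigned chars zero-extended to a 32-bit int, prec is
   MAX (8, 8) + 1 == 9 < 32, and indeed 255 + 255 == 510 can never
   reach the sign bit, so the sum is known non-negative.  */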
13254 case MULT_EXPR:
13255 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13257 /* x * x for floating point x is always non-negative. */
13258 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13259 return true;
13260 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13261 strict_overflow_p)
13262 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13263 strict_overflow_p));
13266 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13267 both unsigned and their total width is less than the result's. */
13268 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13269 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13270 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13272 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13273 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13274 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13275 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13276 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13277 < TYPE_PRECISION (TREE_TYPE (t));
13279 return false;
13281 case BIT_AND_EXPR:
13282 case MAX_EXPR:
13283 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13284 strict_overflow_p)
13285 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13286 strict_overflow_p));
13288 case BIT_IOR_EXPR:
13289 case BIT_XOR_EXPR:
13290 case MIN_EXPR:
13291 case RDIV_EXPR:
13292 case TRUNC_DIV_EXPR:
13293 case CEIL_DIV_EXPR:
13294 case FLOOR_DIV_EXPR:
13295 case ROUND_DIV_EXPR:
13296 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13297 strict_overflow_p)
13298 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13299 strict_overflow_p));
13301 case TRUNC_MOD_EXPR:
13302 case CEIL_MOD_EXPR:
13303 case FLOOR_MOD_EXPR:
13304 case ROUND_MOD_EXPR:
13305 case SAVE_EXPR:
13306 case NON_LVALUE_EXPR:
13307 case FLOAT_EXPR:
13308 case FIX_TRUNC_EXPR:
13309 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13310 strict_overflow_p);
13312 case COMPOUND_EXPR:
13313 case MODIFY_EXPR:
13314 case GIMPLE_MODIFY_STMT:
13315 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13316 strict_overflow_p);
13318 case BIND_EXPR:
13319 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13320 strict_overflow_p);
13322 case COND_EXPR:
13323 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13324 strict_overflow_p)
13325 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13326 strict_overflow_p));
13328 case NOP_EXPR:
13330 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13331 tree outer_type = TREE_TYPE (t);
13333 if (TREE_CODE (outer_type) == REAL_TYPE)
13335 if (TREE_CODE (inner_type) == REAL_TYPE)
13336 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13337 strict_overflow_p);
13338 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13340 if (TYPE_UNSIGNED (inner_type))
13341 return true;
13342 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13343 strict_overflow_p);
13346 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13348 if (TREE_CODE (inner_type) == REAL_TYPE)
13349 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13350 strict_overflow_p);
13351 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13352 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13353 && TYPE_UNSIGNED (inner_type);
13356 break;
13358 case TARGET_EXPR:
13360 tree temp = TARGET_EXPR_SLOT (t);
13361 t = TARGET_EXPR_INITIAL (t);
13363 /* If the initializer is non-void, then it's a normal expression
13364 that will be assigned to the slot. */
13365 if (!VOID_TYPE_P (t))
13366 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13368 /* Otherwise, the initializer sets the slot in some way. One common
13369 way is an assignment statement at the end of the initializer. */
13370 while (1)
13372 if (TREE_CODE (t) == BIND_EXPR)
13373 t = expr_last (BIND_EXPR_BODY (t));
13374 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13375 || TREE_CODE (t) == TRY_CATCH_EXPR)
13376 t = expr_last (TREE_OPERAND (t, 0));
13377 else if (TREE_CODE (t) == STATEMENT_LIST)
13378 t = expr_last (t);
13379 else
13380 break;
13382 if ((TREE_CODE (t) == MODIFY_EXPR
13383 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13384 && GENERIC_TREE_OPERAND (t, 0) == temp)
13385 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13386 strict_overflow_p);
13388 return false;
13391 case CALL_EXPR:
13393 tree fndecl = get_callee_fndecl (t);
13394 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13395 switch (DECL_FUNCTION_CODE (fndecl))
13397 CASE_FLT_FN (BUILT_IN_ACOS):
13398 CASE_FLT_FN (BUILT_IN_ACOSH):
13399 CASE_FLT_FN (BUILT_IN_CABS):
13400 CASE_FLT_FN (BUILT_IN_COSH):
13401 CASE_FLT_FN (BUILT_IN_ERFC):
13402 CASE_FLT_FN (BUILT_IN_EXP):
13403 CASE_FLT_FN (BUILT_IN_EXP10):
13404 CASE_FLT_FN (BUILT_IN_EXP2):
13405 CASE_FLT_FN (BUILT_IN_FABS):
13406 CASE_FLT_FN (BUILT_IN_FDIM):
13407 CASE_FLT_FN (BUILT_IN_HYPOT):
13408 CASE_FLT_FN (BUILT_IN_POW10):
13409 CASE_INT_FN (BUILT_IN_FFS):
13410 CASE_INT_FN (BUILT_IN_PARITY):
13411 CASE_INT_FN (BUILT_IN_POPCOUNT):
13412 case BUILT_IN_BSWAP32:
13413 case BUILT_IN_BSWAP64:
13414 /* Always true. */
13415 return true;
13417 CASE_FLT_FN (BUILT_IN_SQRT):
13418 /* sqrt(-0.0) is -0.0. */
13419 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13420 return true;
13421 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13422 strict_overflow_p);
13424 CASE_FLT_FN (BUILT_IN_ASINH):
13425 CASE_FLT_FN (BUILT_IN_ATAN):
13426 CASE_FLT_FN (BUILT_IN_ATANH):
13427 CASE_FLT_FN (BUILT_IN_CBRT):
13428 CASE_FLT_FN (BUILT_IN_CEIL):
13429 CASE_FLT_FN (BUILT_IN_ERF):
13430 CASE_FLT_FN (BUILT_IN_EXPM1):
13431 CASE_FLT_FN (BUILT_IN_FLOOR):
13432 CASE_FLT_FN (BUILT_IN_FMOD):
13433 CASE_FLT_FN (BUILT_IN_FREXP):
13434 CASE_FLT_FN (BUILT_IN_LCEIL):
13435 CASE_FLT_FN (BUILT_IN_LDEXP):
13436 CASE_FLT_FN (BUILT_IN_LFLOOR):
13437 CASE_FLT_FN (BUILT_IN_LLCEIL):
13438 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13439 CASE_FLT_FN (BUILT_IN_LLRINT):
13440 CASE_FLT_FN (BUILT_IN_LLROUND):
13441 CASE_FLT_FN (BUILT_IN_LRINT):
13442 CASE_FLT_FN (BUILT_IN_LROUND):
13443 CASE_FLT_FN (BUILT_IN_MODF):
13444 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13445 CASE_FLT_FN (BUILT_IN_RINT):
13446 CASE_FLT_FN (BUILT_IN_ROUND):
13447 CASE_FLT_FN (BUILT_IN_SCALB):
13448 CASE_FLT_FN (BUILT_IN_SCALBLN):
13449 CASE_FLT_FN (BUILT_IN_SCALBN):
13450 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13451 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13452 CASE_FLT_FN (BUILT_IN_SINH):
13453 CASE_FLT_FN (BUILT_IN_TANH):
13454 CASE_FLT_FN (BUILT_IN_TRUNC):
13455 /* True if the 1st argument is nonnegative. */
13456 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13457 strict_overflow_p);
13459 CASE_FLT_FN (BUILT_IN_FMAX):
13460 /* True if the 1st OR 2nd arguments are nonnegative. */
13461 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13462 strict_overflow_p)
13463 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13464 strict_overflow_p)));
13466 CASE_FLT_FN (BUILT_IN_FMIN):
13467 /* True if the 1st AND 2nd arguments are nonnegative. */
13468 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13469 strict_overflow_p)
13470 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13471 strict_overflow_p)));
13473 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13474 /* True if the 2nd argument is nonnegative. */
13475 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13476 strict_overflow_p);
13478 CASE_FLT_FN (BUILT_IN_POWI):
13479 /* True if the 1st argument is nonnegative or the second
13480 argument is an even integer. */
13481 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13483 tree arg1 = CALL_EXPR_ARG (t, 1);
13484 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13485 return true;
13487 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13488 strict_overflow_p);
13490 CASE_FLT_FN (BUILT_IN_POW):
13491 /* True if the 1st argument is nonnegative or the second
13492 argument is an even integer valued real. */
13493 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13495 REAL_VALUE_TYPE c;
13496 HOST_WIDE_INT n;
13498 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13499 n = real_to_integer (&c);
13500 if ((n & 1) == 0)
13502 REAL_VALUE_TYPE cint;
13503 real_from_integer (&cint, VOIDmode, n,
13504 n < 0 ? -1 : 0, 0);
13505 if (real_identical (&c, &cint))
13506 return true;
13509 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13510 strict_overflow_p);
13512 default:
13513 break;
13517 /* ... fall through ... */
13519 default:
13520 if (truth_value_p (TREE_CODE (t)))
13521 /* Truth values evaluate to 0 or 1, which is nonnegative. */
13522 return true;
13525 /* We don't know sign of `t', so be conservative and return false. */
13526 return false;
13529 /* Return true if `t' is known to be non-negative. Handle warnings
13530 about undefined signed overflow. */
13532 bool
13533 tree_expr_nonnegative_p (tree t)
13535 bool ret, strict_overflow_p;
13537 strict_overflow_p = false;
13538 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13539 if (strict_overflow_p)
13540 fold_overflow_warning (("assuming signed overflow does not occur when "
13541 "determining that expression is always "
13542 "non-negative"),
13543 WARN_STRICT_OVERFLOW_MISC);
13544 return ret;
13547 /* Return true when T is an address and is known to be nonzero.
13548 For floating point we further ensure that T is not denormal.
13549 Similar logic is present in nonzero_address in rtlanal.c.
13551 If the return value is based on the assumption that signed overflow
13552 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13553 change *STRICT_OVERFLOW_P. */
13555 bool
13556 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13558 tree type = TREE_TYPE (t);
13559 bool sub_strict_overflow_p;
13561 /* Doing something useful for floating point would need more work. */
13562 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13563 return false;
13565 switch (TREE_CODE (t))
13567 case SSA_NAME:
13568 /* Query VRP to see if it has recorded any information about
13569 the range of this object. */
13570 return ssa_name_nonzero_p (t);
13572 case ABS_EXPR:
13573 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13574 strict_overflow_p);
13576 case INTEGER_CST:
13577 return !integer_zerop (t);
13579 case PLUS_EXPR:
13580 if (TYPE_OVERFLOW_UNDEFINED (type))
13582 /* In the presence of negative values it is hard
13583 to say anything. */
13584 sub_strict_overflow_p = false;
13585 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13586 &sub_strict_overflow_p)
13587 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13588 &sub_strict_overflow_p))
13589 return false;
13590 /* One of the operands must be positive and the other non-negative. */
13591 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13592 overflows, on a twos-complement machine the sum of two
13593 nonnegative numbers can never be zero. */
13594 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13595 strict_overflow_p)
13596 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13597 strict_overflow_p));
13599 break;
13601 case MULT_EXPR:
13602 if (TYPE_OVERFLOW_UNDEFINED (type))
13604 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13605 strict_overflow_p)
13606 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13607 strict_overflow_p))
13609 *strict_overflow_p = true;
13610 return true;
13613 break;
13615 case NOP_EXPR:
13617 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13618 tree outer_type = TREE_TYPE (t);
13620 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13621 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13622 strict_overflow_p));
13624 break;
13626 case ADDR_EXPR:
13628 tree base = get_base_address (TREE_OPERAND (t, 0));
13630 if (!base)
13631 return false;
13633 /* Weak declarations may link to NULL. */
13634 if (VAR_OR_FUNCTION_DECL_P (base))
13635 return !DECL_WEAK (base);
13637 /* Constants are never weak. */
13638 if (CONSTANT_CLASS_P (base))
13639 return true;
13641 return false;
13644 case COND_EXPR:
13645 sub_strict_overflow_p = false;
13646 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13647 &sub_strict_overflow_p)
13648 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13649 &sub_strict_overflow_p))
13651 if (sub_strict_overflow_p)
13652 *strict_overflow_p = true;
13653 return true;
13655 break;
13657 case MIN_EXPR:
13658 sub_strict_overflow_p = false;
13659 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13660 &sub_strict_overflow_p)
13661 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13662 &sub_strict_overflow_p))
13664 if (sub_strict_overflow_p)
13665 *strict_overflow_p = true;
13667 break;
13669 case MAX_EXPR:
13670 sub_strict_overflow_p = false;
13671 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13672 &sub_strict_overflow_p))
13674 if (sub_strict_overflow_p)
13675 *strict_overflow_p = true;
13677 /* When both operands are nonzero, then MAX must be too. */
13678 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13679 strict_overflow_p))
13680 return true;
13682 /* MAX where operand 0 is positive is positive. */
13683 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13684 strict_overflow_p);
13686 /* MAX where operand 1 is positive is positive. */
13687 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13688 &sub_strict_overflow_p)
13689 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13690 &sub_strict_overflow_p))
13692 if (sub_strict_overflow_p)
13693 *strict_overflow_p = true;
13694 return true;
13696 break;
13698 case COMPOUND_EXPR:
13699 case MODIFY_EXPR:
13700 case GIMPLE_MODIFY_STMT:
13701 case BIND_EXPR:
13702 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13703 strict_overflow_p);
13705 case SAVE_EXPR:
13706 case NON_LVALUE_EXPR:
13707 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13708 strict_overflow_p);
13710 case BIT_IOR_EXPR:
13711 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13712 strict_overflow_p)
13713 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13714 strict_overflow_p));
13716 case CALL_EXPR:
13717 return alloca_call_p (t);
13719 default:
13720 break;
13722 return false;
13725 /* Return true when T is an address and is known to be nonzero.
13726 Handle warnings about undefined signed overflow. */
13728 bool
13729 tree_expr_nonzero_p (tree t)
13731 bool ret, strict_overflow_p;
13733 strict_overflow_p = false;
13734 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13735 if (strict_overflow_p)
13736 fold_overflow_warning (("assuming signed overflow does not occur when "
13737 "determining that expression is always "
13738 "non-zero"),
13739 WARN_STRICT_OVERFLOW_MISC);
13740 return ret;
13743 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13744 attempt to fold the expression to a constant without modifying TYPE,
13745 OP0 or OP1.
13747 If the expression could be simplified to a constant, then return
13748 the constant. If the expression would not be simplified to a
13749 constant, then return NULL_TREE. */
13751 tree
13752 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13754 tree tem = fold_binary (code, type, op0, op1);
13755 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13758 /* Given the components of a unary expression CODE, TYPE and OP0,
13759 attempt to fold the expression to a constant without modifying
13760 TYPE or OP0.
13762 If the expression could be simplified to a constant, then return
13763 the constant. If the expression would not be simplified to a
13764 constant, then return NULL_TREE. */
13766 tree
13767 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13769 tree tem = fold_unary (code, type, op0);
13770 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13773 /* If EXP represents referencing an element in a constant string
13774 (either via pointer arithmetic or array indexing), return the
13775 tree representing the value accessed, otherwise return NULL. */
13777 tree
13778 fold_read_from_constant_string (tree exp)
13780 if ((TREE_CODE (exp) == INDIRECT_REF
13781 || TREE_CODE (exp) == ARRAY_REF)
13782 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13784 tree exp1 = TREE_OPERAND (exp, 0);
13785 tree index;
13786 tree string;
13788 if (TREE_CODE (exp) == INDIRECT_REF)
13789 string = string_constant (exp1, &index);
13790 else
13792 tree low_bound = array_ref_low_bound (exp);
13793 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13795 /* Optimize the special-case of a zero lower bound.
13797 We convert the low_bound to sizetype to avoid some problems
13798 with constant folding. (E.g. suppose the lower bound is 1,
13799 and its mode is QI. Without the conversion, (ARRAY
13800 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13801 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13802 if (! integer_zerop (low_bound))
13803 index = size_diffop (index, fold_convert (sizetype, low_bound));
13805 string = exp1;
13808 if (string
13809 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13810 && TREE_CODE (string) == STRING_CST
13811 && TREE_CODE (index) == INTEGER_CST
13812 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13813 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13814 == MODE_INT)
13815 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13816 return fold_convert (TREE_TYPE (exp),
13817 build_int_cst (NULL_TREE,
13818 (TREE_STRING_POINTER (string)
13819 [TREE_INT_CST_LOW (index)])));
13821 return NULL;
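/* For illustration: for the C expression "hello"[1], the ARRAY_REF
   satisfies every test above (the index 1 is an INTEGER_CST below
   TREE_STRING_LENGTH and the element mode is a one-byte MODE_INT),
   so the reference folds to the INTEGER_CST 'e'.  */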
13824 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13825 an integer constant or real constant.
13827 TYPE is the type of the result. */
13829 static tree
13830 fold_negate_const (tree arg0, tree type)
13832 tree t = NULL_TREE;
13834 switch (TREE_CODE (arg0))
13836 case INTEGER_CST:
13838 unsigned HOST_WIDE_INT low;
13839 HOST_WIDE_INT high;
13840 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13841 TREE_INT_CST_HIGH (arg0),
13842 &low, &high);
13843 t = force_fit_type_double (type, low, high, 1,
13844 (overflow | TREE_OVERFLOW (arg0))
13845 && !TYPE_UNSIGNED (type));
13846 break;
13849 case REAL_CST:
13850 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13851 break;
13853 default:
13854 gcc_unreachable ();
13857 return t;
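/* For illustration: negating the 32-bit INTEGER_CST -2147483648
   wraps back to -2147483648; neg_double reports the overflow and
   force_fit_type_double then marks the result with TREE_OVERFLOW,
   since the type is signed.  */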
13860 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13861 an integer constant or real constant.
13863 TYPE is the type of the result. */
13865 tree
13866 fold_abs_const (tree arg0, tree type)
13868 tree t = NULL_TREE;
13870 switch (TREE_CODE (arg0))
13872 case INTEGER_CST:
13873 /* If the value is unsigned, then the absolute value is
13874 the same as the ordinary value. */
13875 if (TYPE_UNSIGNED (type))
13876 t = arg0;
13877 /* Similarly, if the value is non-negative. */
13878 else if (INT_CST_LT (integer_minus_one_node, arg0))
13879 t = arg0;
13880 /* If the value is negative, then the absolute value is
13881 its negation. */
13882 else
13884 unsigned HOST_WIDE_INT low;
13885 HOST_WIDE_INT high;
13886 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13887 TREE_INT_CST_HIGH (arg0),
13888 &low, &high);
13889 t = force_fit_type_double (type, low, high, -1,
13890 overflow | TREE_OVERFLOW (arg0));
13892 break;
13894 case REAL_CST:
13895 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13896 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13897 else
13898 t = arg0;
13899 break;
13901 default:
13902 gcc_unreachable ();
13905 return t;
13908 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13909 constant. TYPE is the type of the result. */
13911 static tree
13912 fold_not_const (tree arg0, tree type)
13914 tree t = NULL_TREE;
13916 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13918 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13919 ~TREE_INT_CST_HIGH (arg0), 0,
13920 TREE_OVERFLOW (arg0));
13922 return t;
13925 /* Given CODE, a relational operator, the target type, TYPE and two
13926 constant operands OP0 and OP1, return the result of the
13927 relational operation. If the result is not a compile time
13928 constant, then return NULL_TREE. */
13930 static tree
13931 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13933 int result, invert;
13935 /* From here on, the only cases we handle are when the result is
13936 known to be a constant. */
13938 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13940 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13941 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13943 /* Handle the cases where either operand is a NaN. */
13944 if (real_isnan (c0) || real_isnan (c1))
13946 switch (code)
13948 case EQ_EXPR:
13949 case ORDERED_EXPR:
13950 result = 0;
13951 break;
13953 case NE_EXPR:
13954 case UNORDERED_EXPR:
13955 case UNLT_EXPR:
13956 case UNLE_EXPR:
13957 case UNGT_EXPR:
13958 case UNGE_EXPR:
13959 case UNEQ_EXPR:
13960 result = 1;
13961 break;
13963 case LT_EXPR:
13964 case LE_EXPR:
13965 case GT_EXPR:
13966 case GE_EXPR:
13967 case LTGT_EXPR:
13968 if (flag_trapping_math)
13969 return NULL_TREE;
13970 result = 0;
13971 break;
13973 default:
13974 gcc_unreachable ();
13977 return constant_boolean_node (result, type);
13980 return constant_boolean_node (real_compare (code, c0, c1), type);
13983 /* Handle equality/inequality of complex constants. */
13984 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13986 tree rcond = fold_relational_const (code, type,
13987 TREE_REALPART (op0),
13988 TREE_REALPART (op1));
13989 tree icond = fold_relational_const (code, type,
13990 TREE_IMAGPART (op0),
13991 TREE_IMAGPART (op1));
13992 if (code == EQ_EXPR)
13993 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13994 else if (code == NE_EXPR)
13995 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13996 else
13997 return NULL_TREE;
14000 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14002 To compute GT, swap the arguments and do LT.
14003 To compute GE, do LT and invert the result.
14004 To compute LE, swap the arguments, do LT and invert the result.
14005 To compute NE, do EQ and invert the result.
14007 Therefore, the code below must handle only EQ and LT. */
14009 if (code == LE_EXPR || code == GT_EXPR)
14011 tree tem = op0;
14012 op0 = op1;
14013 op1 = tem;
14014 code = swap_tree_comparison (code);
14017 /* Note that it is safe to invert for real values here because we
14018 have already handled the one case where it matters. */
14020 invert = 0;
14021 if (code == NE_EXPR || code == GE_EXPR)
14023 invert = 1;
14024 code = invert_tree_comparison (code, false);
14027 /* Compute a result for LT or EQ if args permit;
14028 otherwise return NULL_TREE. */
14029 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14031 if (code == EQ_EXPR)
14032 result = tree_int_cst_equal (op0, op1);
14033 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14034 result = INT_CST_LT_UNSIGNED (op0, op1);
14035 else
14036 result = INT_CST_LT (op0, op1);
14038 else
14039 return NULL_TREE;
14041 if (invert)
14042 result ^= 1;
14043 return constant_boolean_node (result, type);
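/* For illustration: with REAL_CST operands 1.0 and a NaN, NE_EXPR and
   the UN* codes fold to true, EQ_EXPR and ORDERED_EXPR fold to false,
   and the signaling codes (LT, LE, GT, GE, LTGT) fold to false only
   when flag_trapping_math is clear; otherwise they are left alone.  */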
14046 /* Build an expression for a cleanup point containing EXPR with type TYPE.
14047 Don't build a cleanup point expression for an EXPR which doesn't have side
14048 effects. */
14050 tree
14051 fold_build_cleanup_point_expr (tree type, tree expr)
14053 /* If the expression does not have side effects then we don't have to wrap
14054 it with a cleanup point expression. */
14055 if (!TREE_SIDE_EFFECTS (expr))
14056 return expr;
14058 /* If the expression is a return, check whether the expression inside the
14059 return, or the right-hand side of the modify expression inside the return,
14060 has no side effects. If either has no side effects we don't need to
14061 wrap the expression in a cleanup point expression. Note we don't check the
14062 left-hand side of the modify because it should always be a return decl. */
14063 if (TREE_CODE (expr) == RETURN_EXPR)
14065 tree op = TREE_OPERAND (expr, 0);
14066 if (!op || !TREE_SIDE_EFFECTS (op))
14067 return expr;
14068 op = TREE_OPERAND (op, 1);
14069 if (!TREE_SIDE_EFFECTS (op))
14070 return expr;
14073 return build1 (CLEANUP_POINT_EXPR, type, expr);
14076 /* Build an expression for the address of T. Folds away INDIRECT_REF to
14077 avoid confusing the gimplify process. */
14079 tree
14080 build_fold_addr_expr_with_type (tree t, tree ptrtype)
14082 /* The size of the object is not relevant when talking about its address. */
14083 if (TREE_CODE (t) == WITH_SIZE_EXPR)
14084 t = TREE_OPERAND (t, 0);
14086 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
14087 if (TREE_CODE (t) == INDIRECT_REF
14088 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
14090 t = TREE_OPERAND (t, 0);
14091 if (TREE_TYPE (t) != ptrtype)
14092 t = build1 (NOP_EXPR, ptrtype, t);
14094 else
14096 tree base = t;
14098 while (handled_component_p (base))
14099 base = TREE_OPERAND (base, 0);
14100 if (DECL_P (base))
14101 TREE_ADDRESSABLE (base) = 1;
14103 t = build1 (ADDR_EXPR, ptrtype, t);
14106 return t;
14109 tree
14110 build_fold_addr_expr (tree t)
14112 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
14115 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14116 of an indirection through OP0, or NULL_TREE if no simplification is
14117 possible. */
14119 tree
14120 fold_indirect_ref_1 (tree type, tree op0)
14122 tree sub = op0;
14123 tree subtype;
14125 STRIP_NOPS (sub);
14126 subtype = TREE_TYPE (sub);
14127 if (!POINTER_TYPE_P (subtype))
14128 return NULL_TREE;
14130 if (TREE_CODE (sub) == ADDR_EXPR)
14132 tree op = TREE_OPERAND (sub, 0);
14133 tree optype = TREE_TYPE (op);
14134 /* *&CONST_DECL -> to the value of the const decl. */
14135 if (TREE_CODE (op) == CONST_DECL)
14136 return DECL_INITIAL (op);
14137 /* *&p => p; make sure to handle *&"str"[cst] here. */
14138 if (type == optype)
14140 tree fop = fold_read_from_constant_string (op);
14141 if (fop)
14142 return fop;
14143 else
14144 return op;
14146 /* *(foo *)&fooarray => fooarray[0] */
14147 else if (TREE_CODE (optype) == ARRAY_TYPE
14148 && type == TREE_TYPE (optype))
14150 tree type_domain = TYPE_DOMAIN (optype);
14151 tree min_val = size_zero_node;
14152 if (type_domain && TYPE_MIN_VALUE (type_domain))
14153 min_val = TYPE_MIN_VALUE (type_domain);
14154 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14156 /* *(foo *)&complexfoo => __real__ complexfoo */
14157 else if (TREE_CODE (optype) == COMPLEX_TYPE
14158 && type == TREE_TYPE (optype))
14159 return fold_build1 (REALPART_EXPR, type, op);
14160 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14161 else if (TREE_CODE (optype) == VECTOR_TYPE
14162 && type == TREE_TYPE (optype))
14164 tree part_width = TYPE_SIZE (type);
14165 tree index = bitsize_int (0);
14166 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14170 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14171 if (TREE_CODE (sub) == PLUS_EXPR
14172 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14174 tree op00 = TREE_OPERAND (sub, 0);
14175 tree op01 = TREE_OPERAND (sub, 1);
14176 tree op00type;
14178 STRIP_NOPS (op00);
14179 op00type = TREE_TYPE (op00);
14180 if (TREE_CODE (op00) == ADDR_EXPR
14181 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14182 && type == TREE_TYPE (TREE_TYPE (op00type)))
14184 tree size = TYPE_SIZE_UNIT (type);
14185 if (tree_int_cst_equal (size, op01))
14186 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14190 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14191 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14192 && type == TREE_TYPE (TREE_TYPE (subtype)))
14194 tree type_domain;
14195 tree min_val = size_zero_node;
14196 sub = build_fold_indirect_ref (sub);
14197 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14198 if (type_domain && TYPE_MIN_VALUE (type_domain))
14199 min_val = TYPE_MIN_VALUE (type_domain);
14200 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14203 return NULL_TREE;
14206 /* Builds an expression for an indirection through T, simplifying some
14207 cases. */
14209 tree
14210 build_fold_indirect_ref (tree t)
14212 tree type = TREE_TYPE (TREE_TYPE (t));
14213 tree sub = fold_indirect_ref_1 (type, t);
14215 if (sub)
14216 return sub;
14217 else
14218 return build1 (INDIRECT_REF, type, t);

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
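
/* Illustrative sketch, not part of the original file: unlike
   build_fold_indirect_ref, this takes an existing INDIRECT_REF node
   `ref' (hypothetical) and returns it unchanged when nothing folds.  */
#if 0
  tree t = fold_indirect_ref (ref);   /* REF itself, or a simpler tree */
#endif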

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
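
/* Illustrative sketch, not part of the original file: `x' is a
   hypothetical side-effect-free VAR_DECL and `call' a hypothetical
   CALL_EXPR with side effects.  */
#if 0
  /* For x + call, operand 0 has no side effects, so the tcc_binary
     case keeps only CALL; a wholly side-effect-free tree folds to the
     constant 0 via the check at the top.  */
  tree t = fold_ignored_result (build2 (PLUS_EXPR, TREE_TYPE (x), x, call));
  /* T is now CALL.  */
#endif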

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
          unsigned HOST_WIDE_INT high;
          bool overflow_p;

          if ((low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          high = TREE_INT_CST_HIGH (value);
          low &= ~(divisor - 1);
          low += divisor;
          if (low == 0)
            {
              high++;
              if (high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), low, high,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop (PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop (BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
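
/* Illustrative sketch, not part of the original file: with a constant
   and a power-of-two divisor the bit-twiddling path applies,
   e.g. (37 & ~7) + 8 == 40.  */
#if 0
  tree r = round_up (size_int (37), 8);   /* folds to INTEGER_CST 40 */
  tree s = round_up (size_int (40), 8);   /* already aligned: returned as-is */
#endif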

/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
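
/* Illustrative sketch, not part of the original file: the power-of-two
   case is a single mask here, e.g. 37 & ~7 == 32.  */
#if 0
  tree r = round_down (size_int (37), 8); /* folds to INTEGER_CST 32 */
#endif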

/* Returns a pointer to the base of the object addressed by EXP, and
   extracts the constant bit offset of the access into *PBITPOS and any
   variable offset into *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
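
/* Illustrative sketch, not part of the original file: for the address
   of a COMPONENT_REF such as &s.f (`s' and `f' hypothetical, with
   `addr_of_s_f' the ADDR_EXPR for it), the core is &s, *PBITPOS is
   f's offset in bits, and *POFFSET is NULL_TREE unless the offset has
   a variable part.  */
#if 0
  HOST_WIDE_INT bitpos;
  tree offset;
  tree core = split_address_to_core_and_offset (addr_of_s_f, &bitpos, &offset);
#endif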

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
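
/* Illustrative sketch, not part of the original file: for &a[3] and
   &a[1] over a hypothetical `int a[10]' (assuming a 32-bit int), both
   cores are &a, so *DIFF becomes (96 - 32) / BITS_PER_UNIT == 8.  */
#if 0
  HOST_WIDE_INT diff;
  if (ptr_difference_const (addr_of_a3, addr_of_a1, &diff))
    {
      /* diff == 8, i.e. 2 * sizeof (int).  */
    }
#endif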

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                            arg0 ? arg0 : TREE_OPERAND (exp, 1),
                            arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr (get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
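
/* Illustrative sketch, not part of the original file: when only the
   magnitude of the result matters, a caller can drop negations.
   `negx' and `y' are hypothetical floating-point operands, `type'
   their common type.  */
#if 0
  /* For -x * y this returns x * y; it returns NULL_TREE if nothing can
     be stripped, or if sign-dependent rounding must be honored.  */
  tree stripped = fold_strip_sign_ops (build2 (MULT_EXPR, type, negx, y));
#endif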